| repo_name | path | copies | size | content | license | var_hash | doc_hash | line_mean | line_max | alpha_frac | autogenerated |
|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 6 to 100 | stringlengths 4 to 294 | stringlengths 1 to 5 | stringlengths 4 to 6 | stringlengths 606 to 896k | stringclasses 15 values | int64 -9,223,186,179,200,150,000 to 9,223,291,175B | int64 -9,223,304,365,658,930,000 to 9,223,309,051B | float64 3.5 to 99.8 | int64 13 to 999 | float64 0.25 to 0.97 | bool 1 class |
aemerick/galaxy_analysis | method_paper_plots/star_abundances.py | 1 | 26128 | from galaxy_analysis.plot.plot_styles import *
import matplotlib.pyplot as plt
import glob
import deepdish as dd
import yt
from galaxy_analysis.utilities import utilities
import numpy as np
from matplotlib.ticker import NullFormatter
from galaxy_analysis.particle_analysis.abundances import single_MDF
#
from galaxy_analysis.analysis import Galaxy
from mpl_toolkits.axes_grid1 import make_axes_locatable
import h5py
# grab the most recent file
workdir = '/mnt/ceph/users/emerick/enzo_runs/pleiades/starIC/run11_30km/final_sndriving/'
#workdir = '/home/emerick/work/enzo_runs/pleiades/starIC/run11_30km/final_sndriving/'
data_files = np.sort(glob.glob(workdir + 'DD????'))
name = data_files[-1].split('final_sndriving/')[1]
gal = Galaxy(name, wdir = workdir)
#
#
#
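# Scatter plot of [alpha/Fe] vs. [Fe/H] for star particles (particle_type == 11), colored by stellar age.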
def plot_alpha_vs_fe():
fig,ax = plt.subplots()
fig.set_size_inches(8,7)
ptype = gal.df['particle_type']
fe_over_h = gal.df[('io','particle_Fe_over_H')]
alpha = gal.df[('io','particle_alpha_over_Fe')]
age = (gal.ds.current_time - gal.df[('io','creation_time')]).convert_to_units('Myr')
age = age - np.min(age)
p = ax.scatter(fe_over_h[ptype==11], alpha[ptype==11],
s = point_size, lw = 2, c = age[ptype==11], cmap = 'plasma_r', alpha = 0.75)
p.set_clim([0.0, np.max(age)])
cb = fig.colorbar(p)
cb.set_label(r'Stellar Age (Myr)')
ax.set_xlim(-9,-1)
ax.set_ylim(-1.75,1.75)
ax.set_xlabel(r'[Fe/H]')
ax.set_ylabel(r'[$\rm \alpha$/Fe]')
plt.minorticks_on()
plt.tight_layout()
fig.savefig('alpha_over_fe.png')
plt.close()
return
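# Render one frame per Myr (0-244 Myr) of the [alpha/Fe] vs. [Fe/H] plot with histograms, for assembling into a movie.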
def plot_alpha_vs_fe_movie():
times = np.arange(0, 245, 1)
for i, t in enumerate(times):
plot_alpha_vs_fe_with_histograms(t_f = t, image_num = i)
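# Same [alpha/Fe] vs. [Fe/H] scatter as above, with marginal histograms along each axis;
# if t_f is given, only stars alive at that time (based on their model lifetimes) are plotted.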
def plot_alpha_vs_fe_with_histograms(t_f = None, image_num = 0):
sep = 0.02
left, width = 0.125, 0.65
bottom, height = 0.1, 0.65
left_h = left + width + sep
bottom_h = bottom + height + sep
rect_scatter = [left,bottom,width,height]
# rect_colorbar =
# rect_histx = [left, bottom_h, width, 0.95 - bottom_h - (left-bottom)]
# rect_histy = [left_h, bottom, 0.95 - left_h, height]
# fig,ax = plt.subplots()
fig = plt.figure(1, figsize=(8,8))
# fig.set_size_inches(8,8)
ax_scatter = plt.axes(rect_scatter)
# ax_hist_x = plt.axes(rect_histx)
# ax_hist_y = plt.axes(rect_histy)
# ax_color = plt.axes(rect_colorbar)
ptype = gal.df['particle_type']
fe_over_h = gal.df[('io','particle_Fe_over_H')]
alpha = gal.df[('io','particle_alpha_over_Fe')]
creation_time = gal.df[('io','creation_time')].convert_to_units('Myr')
age = (gal.ds.current_time - creation_time)
if t_f is None: # plot normally all MS stars
age = age - np.min(age)
# scatter plot
p = ax_scatter.scatter(fe_over_h[ptype==11], alpha[ptype==11],
s = point_size, lw = 2, c = age[ptype==11], cmap = 'plasma_r', alpha = 0.75)
p.set_clim([0.0, np.max(age)])
else:
min_clim = 0.0
max_clim = np.max( age - np.min(age))
particle_lifetimes = gal.df[('io','particle_model_lifetime')].convert_to_units('Myr')
selection = (t_f >= creation_time) * ( t_f < creation_time + particle_lifetimes)
age = t_f - creation_time
if np.size(fe_over_h[selection]) < 1:
plot_fe_over_h = np.ones(np.size(fe_over_h))*(-10000) # make dummy values so the plot still displays, but is empty
plot_alpha = np.ones(np.size(alpha))*(-10000)
plot_age = np.ones(np.size(age))*(-10000)
else:
plot_fe_over_h = fe_over_h[selection]
plot_alpha = alpha[selection]
plot_age = age[selection]
p = ax_scatter.scatter(plot_fe_over_h, plot_alpha, s = point_size, lw = 2,
c = plot_age, cmap = 'plasma_r', alpha = 0.75)
p.set_clim([min_clim,max_clim])
cb = fig.colorbar(p, ax = ax_scatter, orientation = 'horizontal', pad = 0.125, fraction = 0.046,
aspect = 40)
cb.set_label(r'Stellar Age (Myr)')
#
#
ax_scatter.set_xlim(-9,-1)
ax_scatter.set_ylim(-1.75,1.75)
ax_scatter.tick_params(axis='x',which='minor',bottom='on')
ax_scatter.tick_params(axis='y',which='minor',bottom='on')
ax_scatter.set_xlabel(r'[Fe/H]')
ax_scatter.set_ylabel(r'[$\rm \alpha$/Fe]')
plt.minorticks_on()
ax_scatter.plot( ax_scatter.get_xlim(), [0.0,0.0], lw = line_width, color = 'black', ls = '--')
#
# find main plot and construct histograms
#
divider = make_axes_locatable(ax_scatter)
left, bottom, width, height = divider.get_position()
# width, height = divider.get_horizontal(), divider.get_vertical()
sep = 0.01
thickness = np.min( np.array([0.95 - left - width - sep, 0.95 - bottom - height - sep]))
rect_histx = [left, bottom + height + sep, width, thickness]
rect_histy = [left + width + sep, bottom, thickness, height]
ax_hist_x = plt.axes(rect_histx)
ax_hist_y = plt.axes(rect_histy)
nbins = 100
hist,bins = np.histogram(fe_over_h, bins = nbins)
weights = np.ones(np.size(fe_over_h)) * (1.0 / (1.0*np.max(hist)))
ax_hist_x.hist(fe_over_h, color = 'C0', bins = nbins, weights = weights)
if not (t_f is None):
if np.max(plot_fe_over_h) > -1000:
hist,bins = np.histogram(plot_fe_over_h, bins = nbins)
weights = np.ones(np.size(plot_fe_over_h)) * (1.0 / (1.0*np.max(hist)))
ax_hist_x.hist(plot_fe_over_h, color = 'black', bins = nbins, weights = weights,
histtype = 'step', lw = 2.0)
# plot_histogram(ax_hist_x, bins, hist / (1.0*np.max(hist)), color = 'black')
plt.minorticks_on()
# hist,bins = np.histogram(alpha, bins = 24)
# plot_histogram(ax_hist_y, bins, hist / (1.0*np.max(hist)), color = 'black', orientation = 'horizontal')
nbins = 50
hist,bins = np.histogram(alpha, bins = nbins)
weights = np.ones(np.size(fe_over_h)) * (1.0 / (1.0*np.max(hist)))
ax_hist_y.hist(alpha, orientation='horizontal', color = 'C0', bins = nbins, weights = weights)
if not (t_f is None):
if np.max(plot_alpha) > -1000:
hist,bins = np.histogram(plot_alpha, bins = nbins)
weights = np.ones(np.size(plot_alpha)) * (1.0 / (1.0*np.max(hist)))
ax_hist_y.hist(plot_alpha, orientation = 'horizontal', color = 'black', bins = nbins,
weights = weights, histtype='step', lw = 2.0)
ax_hist_x.xaxis.set_major_formatter(NullFormatter())
ax_hist_y.yaxis.set_major_formatter(NullFormatter())
ax_hist_x.set_xlim(ax_scatter.get_xlim())
ax_hist_y.set_ylim(ax_scatter.get_ylim())
ticks = [0.0,0.25,0.5,0.75,1.0]
ax_hist_x.set_yticks(ticks)
ax_hist_y.set_xticks(ticks)
ax_hist_y.set_xticklabels(ticks, rotation = 270)
plt.minorticks_on()
# plt.tight_layout()
if t_f is None:
fig.savefig('alpha_over_fe_hist.png')
else:
fig.savefig('alpha_movie/alpha_over_fe_hist_%0004i.png'%(image_num))
plt.close()
return
def plot_panel(A = 'Fe', B = 'Fe', C = 'H', color = True):
"""
Make panel plots of [X/A] vs. [B/C], where X loops through all available elements
and A, B, C are fixed for all plots, chosen by the user. Default will plot
[X/Fe] vs. [Fe/H]. Default behavior is to color points by age.
"""
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
dfiles = hdf5_data.keys()
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
elements = elements + ['alpha']
age = data['Time'] - data['creation_time'] # age of all particles in this data set
for base in ['H','Fe']:
fig, ax = plt.subplots(4,4, sharex = True, sharey = True)
fig.set_size_inches(4*4,4*4)
fig.subplots_adjust(hspace=0.0, wspace = 0.0)
if base == 'Fe':
bins = np.arange(-3,3.1,0.1)
else:
bins = np.arange(-9,0,0.1)
i,j = 0,0
for e in elements:
if (A == e): # skip
continue
index = (i,j)
y = np.array(data['abundances'][e][A])
x = np.array(data['abundances'][B][C])
p = ax[index].scatter(x, y, s = point_size*0.5,
lw = 2, c = age, cmap = 'plasma_r', alpha = 0.75)
p.set_clim([0.0, np.max(age)])
xy = (0.8,0.8)
ax[index].annotate(e, xy=xy, xytext=xy, xycoords = 'axes fraction',
textcoords = 'axes fraction')
# cb = fig.colorbar(p)
# cb.set_label(r'Stellar Age (Myr)')
j = j + 1
if j >= 4:
j = 0
i = i + 1
for i in np.arange(4):
ax[(3,i)].set_xlabel(r'log([' + B + '/' + C + '])')
ax[(i,0)].set_ylabel(r'log([X/' + A + '])')
if C == 'H':
ax[(i,0)].set_xlim(-10.25, 0.125)
else:
ax[(i,0)].set_xlim(-3.25, 3.25)
if A == 'H':
ax[(0,i)].set_ylim(-10.25, 0.125)
else:
ax[(0,i)].set_ylim(-3.25, 3.25)
for j in np.arange(4):
ax[(j,i)].plot([-10,10], [0.0,0.0], lw = 0.5 * line_width, ls = ':', color = 'black')
plt.minorticks_on()
fig.savefig('X_over_' + A +'_vs_' + B + '_over_' + C + '_panel.png')
plt.close()
return
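# Binned median and interquartile-range profiles of metallicity (or [X/base] abundances)
# as a function of cylindrical radius or |z|.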
def plot_spatial_profiles(field = 'metallicity', abundance = False,
bins = None, spatial_type = 'cylindrical_radius'):
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
dfiles = hdf5_data.keys()
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
elements = elements + ['alpha']
if spatial_type == 'cylindrical_radius':
bin_field = np.sqrt(data['kinematics']['x']**2 + data['kinematics']['y']**2)
xlabel = r'Radius (pc)'
elif spatial_type == 'z':
bin_field = np.abs( data['kinematics']['z'] )
xlabel = r'Z (pc)'
if bins is None:
bins = np.linspace(np.floor(np.min(bin_field)), np.ceil(np.max(bin_field)), 100)
centers = 0.5 * (bins[1:] + bins[:-1])
nbins = np.size(bins)
hist_index = np.digitize(bin_field, bins = bins)
median, q1, q3 = np.zeros(nbins-1), np.zeros(nbins-1), np.zeros(nbins-1)
if field == 'metallicity':
# make a single plot
# bin the data
for i in np.arange(nbins-1):
x = data['metallicity'][hist_index == i + 1]
median[i] = np.median(x)
if np.size(x) > 1:
q1[i] = np.percentile(x, 25.0)
q3[i] = np.percentile(x, 75.0)
elif np.size(x) == 1:
q1[i] = median[i]
q3[i] = median[i]
# now plot
fig, ax = plt.subplots()
fig.set_size_inches(8,8)
plot_histogram(ax, bins, median, lw = line_width, color = 'black', ls = '-')
ax.fill_between(centers, q1, q3, lw = 1.5, color = 'grey')
ax.set_ylabel(r'Metallicity Fraction')
ax.set_xlabel(xlabel)
ax.set_xlim( np.min(bins), np.max(bins))
plt.tight_layout()
plt.minorticks_on()
fig.savefig('metallicity_' + spatial_type + '_profile.png')
plt.close()
elif abundance:
fig, ax = plt.subplots(4,4, sharex = True, sharey = True)
fig.set_size_inches(16,16)
fig.subplots_adjust(hspace = 0.0, wspace = 0.0)
axi, axj = 0,0
for e in elements:
if field == e:
continue
index = (axi,axj)
for i in np.arange(nbins-1):
x = np.array(data['abundances'][e][field])
x = x[ hist_index == (i + 1)]
if np.size(x) > 0:
median[i] = np.median(x)
q1[i] = np.percentile(x, 25)
q3[i] = np.percentile(x, 75)
else:
median[i] = None; q1[i] = None; q3[i] = None
ax[index].annotate(e, xy=(0.8,0.8),xytext=(0.8,0.8),
xycoords='axes fraction',textcoords = 'axes fraction')
plot_histogram(ax[index], bins, median, lw = line_width, color = 'black', ls = '-')
ax[index].fill_between(centers,q1,q3,lw=1.5,color='grey')
axj = axj+1
if axj>=4:
axj = 0
axi = axi + 1
for i in np.arange(4):
ax[(3,i)].set_xlabel(xlabel)
ax[(i,0)].set_ylabel(r'log[X/' + field +'])')
if field == 'H':
ax[(0,i)].set_ylim(-10.25,0.125)
else:
ax[(0,i)].set_ylim(-3.25,3.25)
for j in np.arange(4):
ax[(j,i)].plot([bins[0],bins[-1]], [0.0,0.0], lw = 0.5 * line_width, ls = '--',color ='black')
ax[(i,0)].set_xlim(np.min(bins), np.max(bins))
plt.minorticks_on()
fig.savefig(field + '_' + spatial_type + '_profile_panel.png')
plt.close()
return
def plot_MDF(plot_base = ['H','Fe']):
"""
Make a panel plot of the MDFs (distributions) of all elemental abundance ratios
with respect to each base element in plot_base (H and Fe by default, as separate plots)
"""
if (not (type(plot_base) is list)):
plot_base = [plot_base]
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
dfiles = hdf5_data.keys()
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
elements = elements + ['alpha']
for base in plot_base:
fig, ax = plt.subplots(4,4, sharex = True, sharey = True)
fig.set_size_inches(4*4,4*4)
fig.subplots_adjust(hspace=0.0, wspace = 0.0)
if base == 'Fe':
bins = np.arange(-3,3.1,0.1)
else:
bins = np.arange(-9,0,0.1)
i,j = 0,0
for e in elements:
if (base == e):
continue
index = (i,j)
points = np.array(data['abundances'][e][base])
single_MDF(points, bins = bins, norm = 'peak', ax = ax[index],
label = False, lw = line_width)
x = np.max(bins) - (0.25/6.0 * (bins[-1] - bins[0]))
y = 0.9
ax[index].annotate(e, xy = (x,y), xytext =(x,y))
ax[index].plot([0,0], [0.0,1.0], ls = ':', lw = 0.5 * line_width, color = 'black')
j = j + 1
if j >= 4:
j = 0
i = i + 1
for i in np.arange(4):
ax[(3,i)].set_xlabel(r'log([X/' + base + '])')
ax[(i,0)].set_ylabel(r'N/N$_{\rm peak}$')
if base == 'H':
ax[(i,0)].set_xlim(-10.25, 0.125)
elif base == 'Fe':
ax[(i,0)].set_xlim(-3.25, 3.25)
plt.minorticks_on()
fig.savefig(base + '_MDF.png')
plt.close()
return
def plot_time_evolution():
"""
Make a panel plot of the time evolution of all elemental abundance ratios
with respect to both H and Fe (as separate plots)
"""
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
dfiles = hdf5_data.keys()
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
elements = elements + ['alpha']
for time_type in ['cumulative','10Myr']:
for base in ['H','Fe']:
fig, ax = plt.subplots(4,4, sharex = True, sharey = True)
fig.set_size_inches(4*4,4*4)
fig.subplots_adjust(hspace=0.0, wspace = 0.0)
i,j = 0,0
for e in elements:
if (base == e):
continue
print("plotting " + e + "/" + base + " time evolution")
index = (i,j)
t = data['statistics'][time_type]['bins']
y = data['statistics'][time_type][e][base]['median']
Q1 = data['statistics'][time_type][e][base]['Q1']
Q3 = data['statistics'][time_type][e][base]['Q3']
select = (y*0 == 0) # remove nan values
t = t[select]
t = t - t[0]
ax[index].plot( t, y[select], lw = line_width, ls = '-', color = 'black', label = r' ' + e +' ')
ax[index].fill_between(t, Q1[select], Q3[select], color = 'black', alpha = 0.5, lw = 0.5 * line_width)
ax[index].set_xlim(0.0, np.max(t))
ax[index].plot( [0.0,1000.0], [0.0,0.0], ls = ':', color = 'black', lw = line_width)
ax[index].legend(loc = 'upper right')
j = j + 1
if j >= 4:
j = 0
i = i + 1
for i in np.arange(4):
ax[(3,i)].set_xlabel(r'Time (Myr)')
ax[(i,0)].set_ylabel(r'[X/' + base +']')
if base == 'H':
ax[(i,0)].set_ylim(-12.25, 0.125)
elif base == 'Fe':
ax[(i,0)].set_ylim(-3.25, 3.25)
# for j in np.arange(3):
# ax[(j,i)].set_xticklabels([])
# ax[(i,j+1)].set_yticklabels([])
# ax[(3,i)].set_xticklabels(np.arange(0,np.max(t)+20,20))
# if base == 'Fe':
# ax[(i,0)].set_yticklabels([-3,-2,-1,0,1,2,3,])
# else:
# ax[(i,0)].set_yticklabels([-12, -10, -8, -6, -4, -2, 0])
plt.minorticks_on()
fig.savefig('stellar_x_over_' + base + '_' + time_type +'_evolution.png')
plt.close()
return
def plot_mass_fraction_time_evolution():
"""
Make a panel plot of the time evolution of the stellar mass fraction of each
element (cumulative and 10 Myr statistics, as separate plots)
"""
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
dfiles = hdf5_data.keys()
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
# elements = elements + ['alpha']
for time_type in ['cumulative','10Myr']:
fig, ax = plt.subplots(4,4, sharex = True, sharey = True)
fig.set_size_inches(4*4,4*4)
fig.subplots_adjust(hspace=0.0, wspace = 0.0)
i,j = 0,0
for e in elements:
print("plotting " + e + "mass fraction time evolution")
index = (i,j)
t = data['mass_fraction_statistics'][time_type]['bins']
y = data['mass_fraction_statistics'][time_type][e]['median']
Q1 = data['mass_fraction_statistics'][time_type][e]['Q1']
Q3 = data['mass_fraction_statistics'][time_type][e]['Q3']
select = (y*0 == 0) # remove nan values
t = t[select]
t = t - t[0]
ax[index].plot( t, y[select], lw = line_width, ls = '-', color = 'black', label = r' ' + e +' ')
ax[index].fill_between(t, Q1[select], Q3[select], color = 'black', alpha = 0.5, lw = 0.5 * line_width)
ax[index].set_xlim(0.0, np.max(t))
ax[index].plot( [0.0,1000.0], [0.0,0.0], ls = ':', color = 'black', lw = line_width)
ax[index].legend(loc = 'upper right')
j = j + 1
if j >= 4:
j = 0
i = i + 1
for i in np.arange(4):
ax[(3,i)].set_xlabel(r'Time (Myr)')
ax[(i,0)].set_ylabel(r'log(X Mass Fraction)')
ax[(i,0)].set_ylim(1.0E-10, 1.0E-4)
ax[(i,0)].semilogy()
# for j in np.arange(3):
# ax[(j,i)].set_xticklabels([])
# ax[(i,j+1)].set_yticklabels([])
# ax[(3,i)].set_xticklabels(np.arange(0,np.max(t)+20,20))
# if base == 'Fe':
# ax[(i,0)].set_yticklabels([-3,-2,-1,0,1,2,3,])
# else:
# ax[(i,0)].set_yticklabels([-12, -10, -8, -6, -4, -2, 0])
plt.minorticks_on()
fig.savefig('stellar_mass_fraction_' + time_type +'_evolution.png')
plt.close()
return
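# Scatter plot of [X/A] vs. [B/C] for star particles, colored by age, with marginal histograms on both axes.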
def plot_ratios_with_histograms(X='alpha',A='Fe',B='Fe',C='H'):
filename = workdir + '/abundances/abundances/abundances.h5'
hdf5_data = h5py.File(filename, 'r')
dfiles = hdf5_data.keys()
dfile = dfiles[-1] # do this with most recent data file
data = dd.io.load(filename, '/' + str(dfile))
elements = utilities.sort_by_anum([x for x in data['abundances'].keys() if (not 'alpha' in x)])
elements = elements + ['alpha'] + ['H']
age = data['Time'] - data['creation_time'] # age of all particles in this data set
# --------------------
check_elements = [x for x in [X,A,B,C] if (not (x in elements))]
if len(check_elements) > 0:
print(check_elements, " not in elements list")
print("available: ", elements)
raise ValueError
sep = 0.02
left, width = 0.125, 0.65
bottom, height = 0.1, 0.65
left_h = left + width + sep
bottom_h = bottom + height + sep
rect_scatter = [left,bottom,width,height]
# rect_colorbar =
# rect_histx = [left, bottom_h, width, 0.95 - bottom_h - (left-bottom)]
# rect_histy = [left_h, bottom, 0.95 - left_h, height]
# fig,ax = plt.subplots()
fig = plt.figure(1, figsize=(8,8))
# fig.set_size_inches(8,8)
ax_scatter = plt.axes(rect_scatter)
# ax_hist_x = plt.axes(rect_histx)
# ax_hist_y = plt.axes(rect_histy)
# ax_color = plt.axes(rect_colorbar)
x_values = data['abundances'][B][C]
y_values = data['abundances'][X][A]
age = age - np.min(age) # normalize
# scatter plot
p = ax_scatter.scatter(x_values, y_values,
s = point_size, lw = 2, c = age, cmap = 'plasma_r', alpha = 0.75)
p.set_clim([0.0, np.max(age)])
cb = fig.colorbar(p, ax = ax_scatter, orientation = 'horizontal', pad = 0.125, fraction = 0.046,
aspect = 40)
cb.set_label(r'Stellar Age (Myr)')
#
#
#
ax_scatter.set_xlim(-9,-1)
ax_scatter.set_ylim(-1.75,1.75)
ax_scatter.tick_params(axis='x',which='minor',bottom='on')
ax_scatter.tick_params(axis='y',which='minor',bottom='on')
ax_scatter.set_xlabel(r'log([' + B + '/' + C + '])')
ax_scatter.set_ylabel(r'log([' + X + '/' + A + '])')
plt.minorticks_on()
#
# find main plot and construct histograms
#
divider = make_axes_locatable(ax_scatter)
left, bottom, width, height = divider.get_position()
# width, height = divider.get_horizontal(), divider.get_vertical()
sep = 0.01
thickness = np.min( np.array([0.95 - left - width - sep, 0.95 - bottom - height - sep]))
rect_histx = [left, bottom + height + sep, width, thickness]
rect_histy = [left + width + sep, bottom, thickness, height]
ax_hist_x = plt.axes(rect_histx)
ax_hist_y = plt.axes(rect_histy)
# construct the histogram for the horizontal axis (goes up top)
nbins = 100
hist,bins = np.histogram(x_values, bins = nbins)
weights = np.ones(np.size(x_values)) * (1.0 / (1.0*np.max(hist)))
ax_hist_x.hist(x_values, color = 'C0', bins = nbins, weights = weights)
# plot_histogram(ax_hist_x, bins, hist / (1.0*np.max(hist)), color = 'black')
plt.minorticks_on()
# hist,bins = np.histogram(alpha, bins = 24)
# plot_histogram(ax_hist_y, bins, hist / (1.0*np.max(hist)), color = 'black', orientation = 'horizontal')
# now do the same for the vertical axis histogram
nbins = 50
hist,bins = np.histogram(y_values, bins = nbins)
weights = np.ones(np.size(y_values)) * (1.0 / (1.0*np.max(hist)))
ax_hist_y.hist(y_values, orientation='horizontal', color = 'C0', bins = nbins, weights = weights)
ax_hist_x.xaxis.set_major_formatter(NullFormatter())
ax_hist_y.yaxis.set_major_formatter(NullFormatter())
ax_hist_x.set_xlim(ax_scatter.get_xlim())
ax_hist_y.set_ylim(ax_scatter.get_ylim())
ticks = [0.0,0.25,0.5,0.75,1.0]
ax_hist_x.set_yticks(ticks)
ax_hist_y.set_xticks(ticks)
ax_hist_y.set_xticklabels(ticks, rotation = 270)
plt.minorticks_on()
# plt.tight_layout()
fig.savefig(X + '_over_' + A + '_vs_' + B + '_over_' + C + '_hist.png')
plt.close()
return
if __name__ == '__main__':
plot_mass_fraction_time_evolution() #
# plot_ratios_with_histograms('C','O','Fe','H') # C/O vs Fe/H
# plot_ratios_with_histograms('alpha','Mg','Mg','H')
# plot_ratios_with_histograms('alpha','Fe','Fe','H')
# plot_panel() # default [X/Fe] vs [Fe/H]
# plot_panel(A = 'Mg', B = 'Fe', C = 'H')
# plot_panel(A = 'Mg', B = 'Mg', C = 'Fe')
# plot_panel(A = 'O', B = 'Fe', C = 'H')
# plot_panel(A = 'O', B = 'O', C = 'Fe')
# plot_panel(A = 'Ba', B = 'Ba', C = 'Fe')
# plot_MDF(plot_base = ['H','Fe','O','Ba'])
# plot_time_evolution()
# plot_alpha_vs_fe_with_histograms()
# plot_alpha_vs_fe()
# plot_alpha_vs_fe_movie()
# plot_spatial_profiles(bins=np.arange(0,505,10))
# plot_spatial_profiles(field = 'Fe',abundance=True, bins = np.arange(0,505,10))
# plot_spatial_profiles(field = 'H', abundance=True, bins = np.arange(0,505,10))
| mit | 2,381,731,867,709,139,000 | -820,643,153,997,143,600 | 35.542657 | 121 | 0.530618 | false |
agrista/odoo-saas | addons/marketing_campaign/__openerp__.py | 260 | 3127 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Marketing Campaigns',
'version': '1.1',
'depends': ['marketing',
'document',
'email_template',
'decimal_precision'
],
'author': 'OpenERP SA',
'category': 'Marketing',
'description': """
This module provides leads automation through marketing campaigns (campaigns can in fact be defined on any resource, not just CRM Leads).
=========================================================================================================================================
The campaigns are dynamic and multi-channels. The process is as follows:
------------------------------------------------------------------------
* Design marketing campaigns like workflows, including email templates to
send, reports to print and send by email, custom actions
* Define input segments that will select the items that should enter the
campaign (e.g. leads from certain countries).
* Run your campaign in simulation mode to test it real-time or accelerated,
and fine-tune it
* You may also start the real campaign in manual mode, where each action
requires manual validation
* Finally launch your campaign live, and watch the statistics as the
campaign does everything fully automatically.
While the campaign runs you can of course continue to fine-tune the parameters,
input segments, workflow.
**Note:** If you need demo data, you can install the marketing_campaign_crm_demo
module, but this will also install the CRM application as it depends on
CRM Leads.
""",
'website': 'https://www.odoo.com/page/lead-automation',
'data': [
'marketing_campaign_view.xml',
'marketing_campaign_data.xml',
'marketing_campaign_workflow.xml',
'report/campaign_analysis_view.xml',
'security/marketing_campaign_security.xml',
'security/ir.model.access.csv'
],
'demo': ['marketing_campaign_demo.xml'],
'test': ['test/marketing_campaign.yml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -7,469,663,818,279,918,000 | 4,959,606,231,452,582,000 | 43.042254 | 137 | 0.606652 | false |
rychipman/mongo-python-driver | pymongo/read_preferences.py | 27 | 12784 | # Copyright 2012-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for choosing which member of a replica set to read from."""
from collections import Mapping
from pymongo.errors import ConfigurationError
from pymongo.server_selectors import (member_with_tags_server_selector,
secondary_with_tags_server_selector,
writable_server_selector)
_PRIMARY = 0
_PRIMARY_PREFERRED = 1
_SECONDARY = 2
_SECONDARY_PREFERRED = 3
_NEAREST = 4
_MONGOS_MODES = (
'primary',
'primaryPreferred',
'secondary',
'secondaryPreferred',
'nearest',
)
def _validate_tag_sets(tag_sets):
"""Validate tag sets for a MongoReplicaSetClient.
"""
if tag_sets is None:
return tag_sets
if not isinstance(tag_sets, list):
raise TypeError((
"Tag sets %r invalid, must be a list") % (tag_sets,))
if len(tag_sets) == 0:
raise ValueError((
"Tag sets %r invalid, must be None or contain at least one set of"
" tags") % (tag_sets,))
for tags in tag_sets:
if not isinstance(tags, Mapping):
raise TypeError(
"Tag set %r invalid, must be an instance of dict, "
"bson.son.SON or other type that inherits from "
"collection.Mapping" % (tags,))
return tag_sets
class _ServerMode(object):
"""Base class for all read preferences.
"""
__slots__ = ("__mongos_mode", "__mode", "__tag_sets")
def __init__(self, mode, tag_sets=None):
if mode == _PRIMARY and tag_sets is not None:
raise ConfigurationError("Read preference primary "
"cannot be combined with tags")
self.__mongos_mode = _MONGOS_MODES[mode]
self.__mode = mode
self.__tag_sets = _validate_tag_sets(tag_sets)
@property
def name(self):
"""The name of this read preference.
"""
return self.__class__.__name__
@property
def document(self):
"""Read preference as a document.
"""
if self.__tag_sets in (None, [{}]):
return {'mode': self.__mongos_mode}
return {'mode': self.__mongos_mode, 'tags': self.__tag_sets}
@property
def mode(self):
"""The mode of this read preference instance.
"""
return self.__mode
@property
def tag_sets(self):
"""Set ``tag_sets`` to a list of dictionaries like [{'dc': 'ny'}] to
read only from members whose ``dc`` tag has the value ``"ny"``.
To specify a priority-order for tag sets, provide a list of
tag sets: ``[{'dc': 'ny'}, {'dc': 'la'}, {}]``. A final, empty tag
set, ``{}``, means "read from any member that matches the mode,
ignoring tags." MongoReplicaSetClient tries each set of tags in turn
until it finds a set of tags with at least one matching member.
.. seealso:: `Data-Center Awareness
<http://www.mongodb.org/display/DOCS/Data+Center+Awareness>`_
"""
return list(self.__tag_sets) if self.__tag_sets else [{}]
def __repr__(self):
return "%s(tag_sets=%r)" % (
self.name, self.__tag_sets)
def __eq__(self, other):
if isinstance(other, _ServerMode):
return (self.mode == other.mode and
self.tag_sets == other.tag_sets)
return NotImplemented
def __ne__(self, other):
return not self == other
def __getstate__(self):
"""Return value of object for pickling.
Needed explicitly because __slots__() defined.
"""
return {'mode': self.__mode, 'tag_sets': self.__tag_sets}
def __setstate__(self, value):
"""Restore from pickling."""
self.__mode = value['mode']
self.__mongos_mode = _MONGOS_MODES[self.__mode]
self.__tag_sets = _validate_tag_sets(value['tag_sets'])
class Primary(_ServerMode):
"""Primary read preference.
* When directly connected to one mongod queries are allowed if the server
is standalone or a replica set primary.
* When connected to a mongos queries are sent to the primary of a shard.
* When connected to a replica set queries are sent to the primary of
the replica set.
"""
def __init__(self):
super(Primary, self).__init__(_PRIMARY)
def __call__(self, server_descriptions):
"""Return matching ServerDescriptions from a list."""
return writable_server_selector(server_descriptions)
def __repr__(self):
return "Primary()"
def __eq__(self, other):
if isinstance(other, _ServerMode):
return other.mode == _PRIMARY
return NotImplemented
class PrimaryPreferred(_ServerMode):
"""PrimaryPreferred read preference.
* When directly connected to one mongod queries are allowed to standalone
servers, to a replica set primary, or to replica set secondaries.
* When connected to a mongos queries are sent to the primary of a shard if
available, otherwise a shard secondary.
* When connected to a replica set queries are sent to the primary if
available, otherwise a secondary.
:Parameters:
- `tag_sets`: The :attr:`~tag_sets` to use if the primary is not
available.
"""
def __init__(self, tag_sets=None):
super(PrimaryPreferred, self).__init__(_PRIMARY_PREFERRED, tag_sets)
def __call__(self, server_descriptions):
"""Return matching ServerDescriptions from a list."""
writable_servers = writable_server_selector(server_descriptions)
if writable_servers:
return writable_servers
else:
return secondary_with_tags_server_selector(
self.tag_sets,
server_descriptions)
class Secondary(_ServerMode):
"""Secondary read preference.
* When directly connected to one mongod queries are allowed to standalone
servers, to a replica set primary, or to replica set secondaries.
* When connected to a mongos queries are distributed among shard
secondaries. An error is raised if no secondaries are available.
* When connected to a replica set queries are distributed among
secondaries. An error is raised if no secondaries are available.
:Parameters:
- `tag_sets`: The :attr:`~tag_sets` to use with this read_preference
"""
def __init__(self, tag_sets=None):
super(Secondary, self).__init__(_SECONDARY, tag_sets)
def __call__(self, server_descriptions):
"""Return matching ServerDescriptions from a list."""
return secondary_with_tags_server_selector(
self.tag_sets,
server_descriptions)
class SecondaryPreferred(_ServerMode):
"""SecondaryPreferred read preference.
* When directly connected to one mongod queries are allowed to standalone
servers, to a replica set primary, or to replica set secondaries.
* When connected to a mongos queries are distributed among shard
secondaries, or the shard primary if no secondary is available.
* When connected to a replica set queries are distributed among
secondaries, or the primary if no secondary is available.
:Parameters:
- `tag_sets`: The :attr:`~tag_sets` to use with this read_preference
"""
def __init__(self, tag_sets=None):
super(SecondaryPreferred, self).__init__(_SECONDARY_PREFERRED, tag_sets)
def __call__(self, server_descriptions):
"""Return matching ServerDescriptions from a list."""
secondaries = secondary_with_tags_server_selector(
self.tag_sets,
server_descriptions)
if secondaries:
return secondaries
else:
return writable_server_selector(server_descriptions)
class Nearest(_ServerMode):
"""Nearest read preference.
* When directly connected to one mongod queries are allowed to standalone
servers, to a replica set primary, or to replica set secondaries.
* When connected to a mongos queries are distributed among all members of
a shard.
* When connected to a replica set queries are distributed among all
members.
:Parameters:
- `tag_sets`: The :attr:`~tag_sets` to use with this read_preference
"""
def __init__(self, tag_sets=None):
super(Nearest, self).__init__(_NEAREST, tag_sets)
def __call__(self, server_descriptions):
"""Return matching ServerDescriptions from a list."""
return member_with_tags_server_selector(
self.tag_sets or [{}],
server_descriptions)
_ALL_READ_PREFERENCES = (Primary, PrimaryPreferred,
Secondary, SecondaryPreferred, Nearest)
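# Construct a read preference instance from a numeric mode and optional tag sets.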
def make_read_preference(mode, tag_sets):
if mode == _PRIMARY:
if tag_sets not in (None, [{}]):
raise ConfigurationError("Read preference primary "
"cannot be combined with tags")
return Primary()
return _ALL_READ_PREFERENCES[mode](tag_sets)
_MODES = (
'PRIMARY',
'PRIMARY_PREFERRED',
'SECONDARY',
'SECONDARY_PREFERRED',
'NEAREST',
)
class ReadPreference(object):
"""An enum that defines the read preference modes supported by PyMongo.
See :doc:`/examples/high_availability` for code examples.
A read preference is used in three cases:
:class:`~pymongo.mongo_client.MongoClient` connected to a single mongod:
- ``PRIMARY``: Queries are allowed if the server is standalone or a replica
set primary.
- All other modes allow queries to standalone servers, to a replica set
primary, or to replica set secondaries.
:class:`~pymongo.mongo_client.MongoClient` initialized with the
``replicaSet`` option:
- ``PRIMARY``: Read from the primary. This is the default, and provides the
strongest consistency. If no primary is available, raise
:class:`~pymongo.errors.AutoReconnect`.
- ``PRIMARY_PREFERRED``: Read from the primary if available, or if there is
none, read from a secondary.
- ``SECONDARY``: Read from a secondary. If no secondary is available,
raise :class:`~pymongo.errors.AutoReconnect`.
- ``SECONDARY_PREFERRED``: Read from a secondary if available, otherwise
from the primary.
- ``NEAREST``: Read from any member.
:class:`~pymongo.mongo_client.MongoClient` connected to a mongos, with a
sharded cluster of replica sets:
- ``PRIMARY``: Read from the primary of the shard, or raise
:class:`~pymongo.errors.OperationFailure` if there is none.
This is the default.
- ``PRIMARY_PREFERRED``: Read from the primary of the shard, or if there is
none, read from a secondary of the shard.
- ``SECONDARY``: Read from a secondary of the shard, or raise
:class:`~pymongo.errors.OperationFailure` if there is none.
- ``SECONDARY_PREFERRED``: Read from a secondary of the shard if available,
otherwise from the shard primary.
- ``NEAREST``: Read from any shard member.
"""
PRIMARY = Primary()
PRIMARY_PREFERRED = PrimaryPreferred()
SECONDARY = Secondary()
SECONDARY_PREFERRED = SecondaryPreferred()
NEAREST = Nearest()
def read_pref_mode_from_name(name):
"""Get the read preference mode from mongos/uri name.
"""
return _MONGOS_MODES.index(name)
class MovingAverage(object):
"""Tracks an exponentially-weighted moving average."""
def __init__(self):
self.average = None
def add_sample(self, sample):
if sample < 0:
# Likely system time change while waiting for ismaster response
# and not using time.monotonic. Ignore it, the next one will
# probably be valid.
return
if self.average is None:
self.average = sample
else:
# The Server Selection Spec requires an exponentially weighted
# average with alpha = 0.2.
self.average = 0.8 * self.average + 0.2 * sample
def get(self):
"""Get the calculated average, or None if no samples yet."""
return self.average
def reset(self):
self.average = None
| apache-2.0 | -9,074,759,348,707,843,000 | -1,326,356,303,919,767,300 | 32.642105 | 80 | 0.632979 | false |
cainmatt/django | django/contrib/gis/gdal/srs.py | 366 | 12043 | """
The Spatial Reference class, represents OGR Spatial Reference objects.
Example:
>>> from django.contrib.gis.gdal import SpatialReference
>>> srs = SpatialReference('WGS84')
>>> print(srs)
GEOGCS["WGS 84",
DATUM["WGS_1984",
SPHEROID["WGS 84",6378137,298.257223563,
AUTHORITY["EPSG","7030"]],
TOWGS84[0,0,0,0,0,0,0],
AUTHORITY["EPSG","6326"]],
PRIMEM["Greenwich",0,
AUTHORITY["EPSG","8901"]],
UNIT["degree",0.01745329251994328,
AUTHORITY["EPSG","9122"]],
AUTHORITY["EPSG","4326"]]
>>> print(srs.proj)
+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs
>>> print(srs.ellipsoid)
(6378137.0, 6356752.3142451793, 298.25722356300003)
>>> print(srs.projected, srs.geographic)
False True
>>> srs.import_epsg(32140)
>>> print(srs.name)
NAD83 / Texas South Central
"""
from ctypes import byref, c_char_p, c_int
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import SRSException
from django.contrib.gis.gdal.prototypes import srs as capi
from django.utils import six
from django.utils.encoding import force_bytes, force_text
class SpatialReference(GDALBase):
"""
A wrapper for the OGRSpatialReference object. According to the GDAL Web site,
the SpatialReference object "provide[s] services to represent coordinate
systems (projections and datums) and to transform between them."
"""
def __init__(self, srs_input='', srs_type='user'):
"""
Creates a GDAL OSR Spatial Reference object from the given input.
The input may be string of OGC Well Known Text (WKT), an integer
EPSG code, a PROJ.4 string, and/or a projection "well known" shorthand
string (one of 'WGS84', 'WGS72', 'NAD27', 'NAD83').
"""
if srs_type == 'wkt':
self.ptr = capi.new_srs(c_char_p(b''))
self.import_wkt(srs_input)
return
elif isinstance(srs_input, six.string_types):
# Encoding to ASCII if unicode passed in.
if isinstance(srs_input, six.text_type):
srs_input = srs_input.encode('ascii')
try:
# If SRID is a string, e.g., '4326', then make acceptable
# as user input.
srid = int(srs_input)
srs_input = 'EPSG:%d' % srid
except ValueError:
pass
elif isinstance(srs_input, six.integer_types):
# EPSG integer code was input.
srs_type = 'epsg'
elif isinstance(srs_input, self.ptr_type):
srs = srs_input
srs_type = 'ogr'
else:
raise TypeError('Invalid SRS type "%s"' % srs_type)
if srs_type == 'ogr':
# Input is already an SRS pointer.
srs = srs_input
else:
# Creating a new SRS pointer, using the string buffer.
buf = c_char_p(b'')
srs = capi.new_srs(buf)
# If the pointer is NULL, throw an exception.
if not srs:
raise SRSException('Could not create spatial reference from: %s' % srs_input)
else:
self.ptr = srs
# Importing from either the user input string or an integer SRID.
if srs_type == 'user':
self.import_user_input(srs_input)
elif srs_type == 'epsg':
self.import_epsg(srs_input)
def __del__(self):
"Destroys this spatial reference."
if self._ptr and capi:
capi.release_srs(self._ptr)
def __getitem__(self, target):
"""
Returns the value of the given string attribute node, None if the node
doesn't exist. Can also take a tuple as a parameter, (target, child),
where child is the index of the attribute in the WKT. For example:
>>> wkt = 'GEOGCS["WGS 84", DATUM["WGS_1984", ... AUTHORITY["EPSG","4326"]]'
>>> srs = SpatialReference(wkt) # could also use 'WGS84', or 4326
>>> print(srs['GEOGCS'])
WGS 84
>>> print(srs['DATUM'])
WGS_1984
>>> print(srs['AUTHORITY'])
EPSG
>>> print(srs['AUTHORITY', 1]) # The authority value
4326
>>> print(srs['TOWGS84', 4]) # the fourth value in this wkt
0
>>> print(srs['UNIT|AUTHORITY']) # For the units authority, have to use the pipe symbol.
EPSG
>>> print(srs['UNIT|AUTHORITY', 1]) # The authority value for the units
9122
"""
if isinstance(target, tuple):
return self.attr_value(*target)
else:
return self.attr_value(target)
def __str__(self):
"The string representation uses 'pretty' WKT."
return self.pretty_wkt
# #### SpatialReference Methods ####
def attr_value(self, target, index=0):
"""
The attribute value for the given target node (e.g. 'PROJCS'). The index
keyword specifies an index of the child node to return.
"""
if not isinstance(target, six.string_types) or not isinstance(index, int):
raise TypeError
return capi.get_attr_value(self.ptr, force_bytes(target), index)
def auth_name(self, target):
"Returns the authority name for the given string target node."
return capi.get_auth_name(self.ptr, force_bytes(target))
def auth_code(self, target):
"Returns the authority code for the given string target node."
return capi.get_auth_code(self.ptr, force_bytes(target))
def clone(self):
"Returns a clone of this SpatialReference object."
return SpatialReference(capi.clone_srs(self.ptr))
def from_esri(self):
"Morphs this SpatialReference from ESRI's format to EPSG."
capi.morph_from_esri(self.ptr)
def identify_epsg(self):
"""
This method inspects the WKT of this SpatialReference, and will
add EPSG authority nodes where an EPSG identifier is applicable.
"""
capi.identify_epsg(self.ptr)
def to_esri(self):
"Morphs this SpatialReference to ESRI's format."
capi.morph_to_esri(self.ptr)
def validate(self):
"Checks to see if the given spatial reference is valid."
capi.srs_validate(self.ptr)
# #### Name & SRID properties ####
@property
def name(self):
"Returns the name of this Spatial Reference."
if self.projected:
return self.attr_value('PROJCS')
elif self.geographic:
return self.attr_value('GEOGCS')
elif self.local:
return self.attr_value('LOCAL_CS')
else:
return None
@property
def srid(self):
"Returns the SRID of top-level authority, or None if undefined."
try:
return int(self.attr_value('AUTHORITY', 1))
except (TypeError, ValueError):
return None
# #### Unit Properties ####
@property
def linear_name(self):
"Returns the name of the linear units."
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
return name
@property
def linear_units(self):
"Returns the value of the linear units."
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
return units
@property
def angular_name(self):
"Returns the name of the angular units."
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
return name
@property
def angular_units(self):
"Returns the value of the angular units."
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
return units
@property
def units(self):
"""
Returns a 2-tuple of the units value and the units name,
and will automatically determines whether to return the linear
or angular units.
"""
units, name = None, None
if self.projected or self.local:
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
elif self.geographic:
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
if name is not None:
name = force_text(name)
return (units, name)
# #### Spheroid/Ellipsoid Properties ####
@property
def ellipsoid(self):
"""
Returns a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening)
"""
return (self.semi_major, self.semi_minor, self.inverse_flattening)
@property
def semi_major(self):
"Returns the Semi Major Axis for this Spatial Reference."
return capi.semi_major(self.ptr, byref(c_int()))
@property
def semi_minor(self):
"Returns the Semi Minor Axis for this Spatial Reference."
return capi.semi_minor(self.ptr, byref(c_int()))
@property
def inverse_flattening(self):
"Returns the Inverse Flattening for this Spatial Reference."
return capi.invflattening(self.ptr, byref(c_int()))
# #### Boolean Properties ####
@property
def geographic(self):
"""
Returns True if this SpatialReference is geographic
(root node is GEOGCS).
"""
return bool(capi.isgeographic(self.ptr))
@property
def local(self):
"Returns True if this SpatialReference is local (root node is LOCAL_CS)."
return bool(capi.islocal(self.ptr))
@property
def projected(self):
"""
Returns True if this SpatialReference is a projected coordinate system
(root node is PROJCS).
"""
return bool(capi.isprojected(self.ptr))
# #### Import Routines #####
def import_epsg(self, epsg):
"Imports the Spatial Reference from the EPSG code (an integer)."
capi.from_epsg(self.ptr, epsg)
def import_proj(self, proj):
"Imports the Spatial Reference from a PROJ.4 string."
capi.from_proj(self.ptr, proj)
def import_user_input(self, user_input):
"Imports the Spatial Reference from the given user input string."
capi.from_user_input(self.ptr, force_bytes(user_input))
def import_wkt(self, wkt):
"Imports the Spatial Reference from OGC WKT (string)"
capi.from_wkt(self.ptr, byref(c_char_p(wkt)))
def import_xml(self, xml):
"Imports the Spatial Reference from an XML string."
capi.from_xml(self.ptr, xml)
# #### Export Properties ####
@property
def wkt(self):
"Returns the WKT representation of this Spatial Reference."
return capi.to_wkt(self.ptr, byref(c_char_p()))
@property
def pretty_wkt(self, simplify=0):
"Returns the 'pretty' representation of the WKT."
return capi.to_pretty_wkt(self.ptr, byref(c_char_p()), simplify)
@property
def proj(self):
"Returns the PROJ.4 representation for this Spatial Reference."
return capi.to_proj(self.ptr, byref(c_char_p()))
@property
def proj4(self):
"Alias for proj()."
return self.proj
@property
def xml(self, dialect=''):
"Returns the XML representation of this Spatial Reference."
return capi.to_xml(self.ptr, byref(c_char_p()), dialect)
class CoordTransform(GDALBase):
"The coordinate system transformation object."
def __init__(self, source, target):
"Initializes on a source and target SpatialReference objects."
if not isinstance(source, SpatialReference) or not isinstance(target, SpatialReference):
raise TypeError('source and target must be of type SpatialReference')
self.ptr = capi.new_ct(source._ptr, target._ptr)
self._srs1_name = source.name
self._srs2_name = target.name
def __del__(self):
"Deletes this Coordinate Transformation object."
if self._ptr and capi:
capi.destroy_ct(self._ptr)
def __str__(self):
return 'Transform from "%s" to "%s"' % (self._srs1_name, self._srs2_name)
| bsd-3-clause | 37,023,853,127,651,230 | -7,539,471,682,451,482,000 | 33.606322 | 97 | 0.603836 | false |
MounirMesselmeni/django | django/contrib/auth/migrations/0008_alter_user_username_max_length.py | 26 | 1030 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('auth', '0007_alter_validators_add_error_messages'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(
error_messages={'unique': 'A user with that username already exists.'},
help_text='Required. 254 characters or fewer. Letters, digits and @/./+/-/_ only.',
max_length=254,
unique=True,
validators=[
django.core.validators.RegexValidator(
'^[\\w.@+-]+$', 'Enter a valid username. '
'This value may contain only letters, numbers and @/./+/-/_ characters.'
),
],
verbose_name='username',
),
),
]
| bsd-3-clause | -2,460,225,120,297,061,000 | -1,897,814,564,887,023,000 | 31.1875 | 99 | 0.508738 | false |
tumbl3w33d/ansible | lib/ansible/modules/crypto/openssl_certificate.py | 2 | 115826 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
# Copyright: (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: openssl_certificate
version_added: "2.4"
short_description: Generate and/or check OpenSSL certificates
description:
- This module allows one to (re)generate OpenSSL certificates.
- It implements a notion of provider (i.e. C(selfsigned), C(ownca), C(acme), C(assertonly), C(entrust))
for your certificate.
- The C(assertonly) provider is intended for use cases where one is only interested in
checking properties of a supplied certificate. Please note that this provider has been
deprecated in Ansible 2.9 and will be removed in Ansible 2.13. See the examples on how
to emulate C(assertonly) usage with M(openssl_certificate_info), M(openssl_csr_info),
M(openssl_privatekey_info) and M(assert). This also allows more flexible checks than
the ones offered by the C(assertonly) provider.
- The C(ownca) provider is intended for generating OpenSSL certificate signed with your own
CA (Certificate Authority) certificate (self-signed certificate).
- Many properties that can be specified in this module are for validation of an
existing or newly generated certificate. The proper place to specify them, if you
want to receive a certificate with these properties is a CSR (Certificate Signing Request).
- "Please note that the module regenerates existing certificate if it doesn't match the module's
options, or if it seems to be corrupt. If you are concerned that this could overwrite
your existing certificate, consider using the I(backup) option."
- It uses the pyOpenSSL or cryptography python library to interact with OpenSSL.
- If both the cryptography and PyOpenSSL libraries are available (and meet the minimum version requirements)
cryptography will be preferred as a backend over PyOpenSSL (unless the backend is forced with C(select_crypto_backend)).
Please note that the PyOpenSSL backend was deprecated in Ansible 2.9 and will be removed in Ansible 2.13.
requirements:
- PyOpenSSL >= 0.15 or cryptography >= 1.6 (if using C(selfsigned) or C(assertonly) provider)
- acme-tiny (if using the C(acme) provider)
author:
- Yanis Guenane (@Spredzy)
- Markus Teufelberger (@MarkusTeufelberger)
options:
state:
description:
- Whether the certificate should exist or not, taking action if the state is different from what is stated.
type: str
default: present
choices: [ absent, present ]
path:
description:
- Remote absolute path where the generated certificate file should be created or is already located.
type: path
required: true
provider:
description:
- Name of the provider to use to generate/retrieve the OpenSSL certificate.
- The C(assertonly) provider will not generate files and will fail if the certificate file is missing.
- The C(assertonly) provider has been deprecated in Ansible 2.9 and will be removed in Ansible 2.13.
Please see the examples on how to emulate it with M(openssl_certificate_info), M(openssl_csr_info),
M(openssl_privatekey_info) and M(assert).
- "The C(entrust) provider was added for Ansible 2.9 and requires credentials for the
L(https://www.entrustdatacard.com/products/categories/ssl-certificates,Entrust Certificate Services) (ECS) API."
type: str
required: true
choices: [ acme, assertonly, entrust, ownca, selfsigned ]
force:
description:
- Generate the certificate, even if it already exists.
type: bool
default: no
csr_path:
description:
- Path to the Certificate Signing Request (CSR) used to generate this certificate.
- This is not required in C(assertonly) mode.
type: path
privatekey_path:
description:
- Path to the private key to use when signing the certificate.
type: path
privatekey_passphrase:
description:
- The passphrase for the I(privatekey_path).
- This is required if the private key is password protected.
type: str
selfsigned_version:
description:
- Version of the C(selfsigned) certificate.
- Nowadays it should almost always be C(3).
- This is only used by the C(selfsigned) provider.
type: int
default: 3
version_added: "2.5"
selfsigned_digest:
description:
- Digest algorithm to be used when self-signing the certificate.
- This is only used by the C(selfsigned) provider.
type: str
default: sha256
selfsigned_not_before:
description:
- The point in time the certificate is valid from.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
+ C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will start being valid from now.
- This is only used by the C(selfsigned) provider.
type: str
default: +0s
aliases: [ selfsigned_notBefore ]
selfsigned_not_after:
description:
- The point in time at which the certificate stops being valid.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
+ C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will stop being valid 10 years from now.
- This is only used by the C(selfsigned) provider.
type: str
default: +3650d
aliases: [ selfsigned_notAfter ]
selfsigned_create_subject_key_identifier:
description:
- Whether to create the Subject Key Identifier (SKI) from the public key.
- A value of C(create_if_not_provided) (default) only creates a SKI when the CSR does not
provide one.
- A value of C(always_create) always creates a SKI. If the CSR provides one, that one is
ignored.
- A value of C(never_create) never creates a SKI. If the CSR provides one, that one is used.
- This is only used by the C(selfsigned) provider.
- Note that this is only supported if the C(cryptography) backend is used!
type: str
choices: [create_if_not_provided, always_create, never_create]
default: create_if_not_provided
version_added: "2.9"
ownca_path:
description:
- Remote absolute path of the CA (Certificate Authority) certificate.
- This is only used by the C(ownca) provider.
type: path
version_added: "2.7"
ownca_privatekey_path:
description:
- Path to the CA (Certificate Authority) private key to use when signing the certificate.
- This is only used by the C(ownca) provider.
type: path
version_added: "2.7"
ownca_privatekey_passphrase:
description:
- The passphrase for the I(ownca_privatekey_path).
- This is only used by the C(ownca) provider.
type: str
version_added: "2.7"
ownca_digest:
description:
- The digest algorithm to be used for the C(ownca) certificate.
- This is only used by the C(ownca) provider.
type: str
default: sha256
version_added: "2.7"
ownca_version:
description:
- The version of the C(ownca) certificate.
- Nowadays it should almost always be C(3).
- This is only used by the C(ownca) provider.
type: int
default: 3
version_added: "2.7"
ownca_not_before:
description:
- The point in time the certificate is valid from.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
+ C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will start being valid from now.
- This is only used by the C(ownca) provider.
type: str
default: +0s
version_added: "2.7"
ownca_not_after:
description:
- The point in time at which the certificate stops being valid.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
+ C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will stop being valid 10 years from now.
- This is only used by the C(ownca) provider.
type: str
default: +3650d
version_added: "2.7"
ownca_create_subject_key_identifier:
description:
- Whether to create the Subject Key Identifier (SKI) from the public key.
- A value of C(create_if_not_provided) (default) only creates a SKI when the CSR does not
provide one.
- A value of C(always_create) always creates a SKI. If the CSR provides one, that one is
ignored.
- A value of C(never_create) never creates a SKI. If the CSR provides one, that one is used.
- This is only used by the C(ownca) provider.
- Note that this is only supported if the C(cryptography) backend is used!
type: str
choices: [create_if_not_provided, always_create, never_create]
default: create_if_not_provided
version_added: "2.9"
ownca_create_authority_key_identifier:
description:
- Create a Authority Key Identifier from the CA's certificate. If the CSR provided
a authority key identifier, it is ignored.
- The Authority Key Identifier is generated from the CA certificate's Subject Key Identifier,
if available. If it is not available, the CA certificate's public key will be used.
- This is only used by the C(ownca) provider.
- Note that this is only supported if the C(cryptography) backend is used!
type: bool
default: yes
version_added: "2.9"
acme_accountkey_path:
description:
- The path to the accountkey for the C(acme) provider.
- This is only used by the C(acme) provider.
type: path
acme_challenge_path:
description:
      - The path to the ACME challenge directory that is served on U(http://<HOST>:80/.well-known/acme-challenge/).
- This is only used by the C(acme) provider.
type: path
acme_chain:
description:
      - Include the intermediate certificate in the generated certificate.
- This is only used by the C(acme) provider.
- Note that this is only available for older versions of C(acme-tiny).
New versions include the chain automatically, and setting I(acme_chain) to C(yes) results in an error.
type: bool
default: no
version_added: "2.5"
signature_algorithms:
description:
- A list of algorithms that you would accept the certificate to be signed with
(e.g. ['sha256WithRSAEncryption', 'sha512WithRSAEncryption']).
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: list
elements: str
issuer:
description:
- The key/value pairs that must be present in the issuer name field of the certificate.
- If you need to specify more than one value with the same key, use a list as value.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: dict
issuer_strict:
description:
- If set to C(yes), the I(issuer) field must contain only these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
version_added: "2.5"
subject:
description:
- The key/value pairs that must be present in the subject name field of the certificate.
- If you need to specify more than one value with the same key, use a list as value.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: dict
subject_strict:
description:
- If set to C(yes), the I(subject) field must contain only these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
version_added: "2.5"
has_expired:
description:
- Checks if the certificate is expired/not expired at the time the module is executed.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
version:
description:
- The version of the certificate.
      - Nowadays it should almost always be C(3).
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: int
valid_at:
description:
- The certificate must be valid at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: str
invalid_at:
description:
- The certificate must be invalid at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: str
not_before:
description:
- The certificate must start to become valid at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: str
aliases: [ notBefore ]
not_after:
description:
- The certificate must expire at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: str
aliases: [ notAfter ]
valid_in:
description:
- The certificate must still be valid at this relative time offset from now.
- Valid format is C([+-]timespec | number_of_seconds) where timespec can be an integer
        + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
- Note that if using this parameter, this module is NOT idempotent.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: str
key_usage:
description:
- The I(key_usage) extension field must contain all these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: list
elements: str
aliases: [ keyUsage ]
key_usage_strict:
description:
- If set to C(yes), the I(key_usage) extension field must contain only these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
aliases: [ keyUsage_strict ]
extended_key_usage:
description:
- The I(extended_key_usage) extension field must contain all these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: list
elements: str
aliases: [ extendedKeyUsage ]
extended_key_usage_strict:
description:
- If set to C(yes), the I(extended_key_usage) extension field must contain only these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
aliases: [ extendedKeyUsage_strict ]
subject_alt_name:
description:
- The I(subject_alt_name) extension field must contain these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: list
elements: str
aliases: [ subjectAltName ]
subject_alt_name_strict:
description:
- If set to C(yes), the I(subject_alt_name) extension field must contain only these values.
- This is only used by the C(assertonly) provider.
- This option is deprecated since Ansible 2.9 and will be removed with the C(assertonly) provider in Ansible 2.13.
For alternatives, see the example on replacing C(assertonly).
type: bool
default: no
aliases: [ subjectAltName_strict ]
select_crypto_backend:
description:
- Determines which crypto backend to use.
- The default choice is C(auto), which tries to use C(cryptography) if available, and falls back to C(pyopenssl).
- If set to C(pyopenssl), will try to use the L(pyOpenSSL,https://pypi.org/project/pyOpenSSL/) library.
- If set to C(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
- Please note that the C(pyopenssl) backend has been deprecated in Ansible 2.9, and will be removed in Ansible 2.13.
From that point on, only the C(cryptography) backend will be available.
type: str
default: auto
choices: [ auto, cryptography, pyopenssl ]
version_added: "2.8"
backup:
description:
- Create a backup file including a timestamp so you can get the original
certificate back if you overwrote it with a new one by accident.
- This is not used by the C(assertonly) provider.
type: bool
default: no
version_added: "2.8"
entrust_cert_type:
description:
- Specify the type of certificate requested.
- This is only used by the C(entrust) provider.
type: str
default: STANDARD_SSL
choices: [ 'STANDARD_SSL', 'ADVANTAGE_SSL', 'UC_SSL', 'EV_SSL', 'WILDCARD_SSL', 'PRIVATE_SSL', 'PD_SSL', 'CDS_ENT_LITE', 'CDS_ENT_PRO', 'SMIME_ENT' ]
version_added: "2.9"
entrust_requester_email:
description:
- The email of the requester of the certificate (for tracking purposes).
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: str
version_added: "2.9"
entrust_requester_name:
description:
- The name of the requester of the certificate (for tracking purposes).
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: str
version_added: "2.9"
entrust_requester_phone:
description:
- The phone number of the requester of the certificate (for tracking purposes).
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: str
version_added: "2.9"
entrust_api_user:
description:
- The username for authentication to the Entrust Certificate Services (ECS) API.
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: str
version_added: "2.9"
entrust_api_key:
description:
- The key (password) for authentication to the Entrust Certificate Services (ECS) API.
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: str
version_added: "2.9"
entrust_api_client_cert_path:
description:
- The path to the client certificate used to authenticate to the Entrust Certificate Services (ECS) API.
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: path
version_added: "2.9"
entrust_api_client_cert_key_path:
description:
- The path to the private key of the client certificate used to authenticate to the Entrust Certificate Services (ECS) API.
- This is only used by the C(entrust) provider.
- This is required if the provider is C(entrust).
type: path
version_added: "2.9"
entrust_not_after:
description:
- The point in time at which the certificate stops being valid.
- Time can be specified either as relative time or as an absolute timestamp.
- A valid absolute time format is C(ASN.1 TIME) such as C(2019-06-18).
      - A valid relative time format is C([+-]timespec) where timespec can be an integer + C([w | d | h | m | s]), such as C(+365d) or C(+32w1d2h).
- Time will always be interpreted as UTC.
- Note that only the date (day, month, year) is supported for specifying the expiry date of the issued certificate.
- The full date-time is adjusted to EST (GMT -5:00) before issuance, which may result in a certificate with an expiration date one day
earlier than expected if a relative time is used.
      - The minimum certificate lifetime is 90 days, and the maximum is three years.
      - If this value is not specified, the certificate will stop being valid 365 days from the date of issue.
- This is only used by the C(entrust) provider.
type: str
default: +365d
version_added: "2.9"
entrust_api_specification_path:
description:
- The path to the specification file defining the Entrust Certificate Services (ECS) API configuration.
- You can use this to keep a local copy of the specification to avoid downloading it every time the module is used.
- This is only used by the C(entrust) provider.
type: path
default: https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml
version_added: "2.9"
extends_documentation_fragment: files
notes:
- All ASN.1 TIME values should be specified following the YYYYMMDDHHMMSSZ pattern.
   - Dates specified should be UTC. Minutes and seconds are mandatory.
   - For security reasons, when you use the C(ownca) provider, you should NOT run M(openssl_certificate) on
a target machine, but on a dedicated CA machine. It is recommended not to store the CA private key
on the target machine. Once signed, the certificate can be moved to the target machine.
seealso:
- module: openssl_csr
- module: openssl_dhparam
- module: openssl_pkcs12
- module: openssl_privatekey
- module: openssl_publickey
'''
EXAMPLES = r'''
- name: Generate a Self Signed OpenSSL certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
privatekey_path: /etc/ssl/private/ansible.com.pem
csr_path: /etc/ssl/csr/ansible.com.csr
provider: selfsigned
- name: Generate an OpenSSL certificate signed with your own CA certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
ownca_path: /etc/ssl/crt/ansible_CA.crt
ownca_privatekey_path: /etc/ssl/private/ansible_CA.pem
provider: ownca
- name: Generate a Let's Encrypt Certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: acme
acme_accountkey_path: /etc/ssl/private/ansible.com.pem
acme_challenge_path: /etc/ssl/challenges/ansible.com/
- name: Force (re-)generate a new Let's Encrypt Certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: acme
acme_accountkey_path: /etc/ssl/private/ansible.com.pem
acme_challenge_path: /etc/ssl/challenges/ansible.com/
force: yes
- name: Generate an Entrust certificate via the Entrust Certificate Services (ECS) API
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: entrust
entrust_requester_name: Jo Doe
entrust_requester_email: jdoe@ansible.com
entrust_requester_phone: 555-555-5555
entrust_cert_type: STANDARD_SSL
entrust_api_user: apiusername
entrust_api_key: a^lv*32!cd9LnT
entrust_api_client_cert_path: /etc/ssl/entrust/ecs-client.crt
entrust_api_client_cert_key_path: /etc/ssl/entrust/ecs-key.crt
entrust_api_specification_path: /etc/ssl/entrust/api-docs/cms-api-2.1.0.yaml
# The following example shows an assertonly task that uses all existing assertonly options,
# and how to emulate its behavior with the openssl_certificate_info, openssl_csr_info,
# openssl_privatekey_info and assert modules:
- openssl_certificate:
provider: assertonly
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
privatekey_path: /etc/ssl/csr/ansible.com.key
signature_algorithms:
- sha256WithRSAEncryption
- sha512WithRSAEncryption
subject:
commonName: ansible.com
subject_strict: yes
issuer:
commonName: ansible.com
issuer_strict: yes
has_expired: no
version: 3
key_usage:
- Data Encipherment
key_usage_strict: yes
extended_key_usage:
- DVCS
extended_key_usage_strict: yes
subject_alt_name:
- dns:ansible.com
subject_alt_name_strict: yes
not_before: 20190331202428Z
not_after: 20190413202428Z
valid_at: "+1d10h"
invalid_at: 20200331202428Z
valid_in: 10 # in ten seconds
- openssl_certificate_info:
path: /etc/ssl/crt/ansible.com.crt
# for valid_at, invalid_at and valid_in
valid_at:
one_day_ten_hours: "+1d10h"
fixed_timestamp: 20200331202428Z
ten_seconds: "+10"
register: result
- openssl_csr_info:
# Verifies that the CSR signature is valid; module will fail if not
path: /etc/ssl/csr/ansible.com.csr
register: result_csr
- openssl_privatekey_info:
path: /etc/ssl/csr/ansible.com.key
register: result_privatekey
- assert:
that:
# When private key is specified for assertonly, this will be checked:
- result.public_key == result_privatekey.public_key
# When CSR is specified for assertonly, this will be checked:
- result.public_key == result_csr.public_key
- result.subject_ordered == result_csr.subject_ordered
- result.extensions_by_oid == result_csr.extensions_by_oid
# signature_algorithms check
- "result.signature_algorithm == 'sha256WithRSAEncryption' or result.signature_algorithm == 'sha512WithRSAEncryption'"
# subject and subject_strict
- "result.subject.commonName == 'ansible.com'"
- "result.subject | length == 1" # the number must be the number of entries you check for
# issuer and issuer_strict
- "result.issuer.commonName == 'ansible.com'"
- "result.issuer | length == 1" # the number must be the number of entries you check for
# has_expired
- not result.expired
# version
- result.version == 3
# key_usage and key_usage_strict
- "'Data Encipherment' in result.key_usage"
- "result.key_usage | length == 1" # the number must be the number of entries you check for
# extended_key_usage and extended_key_usage_strict
- "'DVCS' in result.extended_key_usage"
- "result.extended_key_usage | length == 1" # the number must be the number of entries you check for
# subject_alt_name and subject_alt_name_strict
- "'dns:ansible.com' in result.subject_alt_name"
- "result.subject_alt_name | length == 1" # the number must be the number of entries you check for
# not_before and not_after
- "result.not_before == '20190331202428Z'"
- "result.not_after == '20190413202428Z'"
# valid_at, invalid_at and valid_in
- "result.valid_at.one_day_ten_hours" # for valid_at
- "not result.valid_at.fixed_timestamp" # for invalid_at
- "result.valid_at.ten_seconds" # for valid_in
# Examples for some checks one could use the assertonly provider for:
# (Please note that assertonly has been deprecated!)
# How to use the assertonly provider to implement and trigger your own custom certificate generation workflow:
- name: Check if a certificate is currently still valid, ignoring failures
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
has_expired: no
ignore_errors: yes
register: validity_check
- name: Run custom task(s) to get a new, valid certificate in case the initial check failed
command: superspecialSSL recreate /etc/ssl/crt/example.com.crt
when: validity_check.failed
- name: Check the new certificate again for validity with the same parameters, this time failing the play if it is still invalid
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
has_expired: no
when: validity_check.failed
# Some other checks that assertonly could be used for:
- name: Verify that an existing certificate was issued by the Let's Encrypt CA and is currently still valid
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
issuer:
O: Let's Encrypt
has_expired: no
- name: Ensure that a certificate uses a modern signature algorithm (no SHA1, MD5 or DSA)
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
signature_algorithms:
- sha224WithRSAEncryption
- sha256WithRSAEncryption
- sha384WithRSAEncryption
- sha512WithRSAEncryption
- sha224WithECDSAEncryption
- sha256WithECDSAEncryption
- sha384WithECDSAEncryption
- sha512WithECDSAEncryption
- name: Ensure that the existing certificate belongs to the specified private key
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
privatekey_path: /etc/ssl/private/example.com.pem
provider: assertonly
- name: Ensure that the existing certificate is still valid at the winter solstice 2017
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
valid_at: 20171221162800Z
- name: Ensure that the existing certificate is still valid 2 weeks (1209600 seconds) from now
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
valid_in: 1209600
- name: Ensure that the existing certificate is only used for digital signatures and encrypting other keys
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
key_usage:
- digitalSignature
- keyEncipherment
key_usage_strict: true
- name: Ensure that the existing certificate can be used for client authentication
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
extended_key_usage:
- clientAuth
- name: Ensure that the existing certificate can only be used for client authentication and time stamping
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
extended_key_usage:
- clientAuth
- 1.3.6.1.5.5.7.3.8
extended_key_usage_strict: true
- name: Ensure that the existing certificate has a certain domain in its subjectAltName
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
subject_alt_name:
- www.example.com
- test.example.com
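# The following is a minimal sketch showing the relative timespec format (see the option
# documentation above) together with the ownca provider; the paths and the validity period
# are hypothetical and should be adapted to your setup.
- name: Generate a certificate signed by your own CA that becomes valid now and expires in 10 weeks
  openssl_certificate:
    path: /etc/ssl/crt/example.com.crt
    csr_path: /etc/ssl/csr/example.com.csr
    ownca_path: /etc/ssl/crt/example_CA.crt
    ownca_privatekey_path: /etc/ssl/private/example_CA.pem
    ownca_not_before: +0s
    ownca_not_after: +10w
    provider: ownca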
'''
RETURN = r'''
filename:
description: Path to the generated Certificate
returned: changed or success
type: str
sample: /etc/ssl/crt/www.ansible.com.crt
backup_file:
description: Name of backup file created.
returned: changed and if I(backup) is C(yes)
type: str
sample: /path/to/www.ansible.com.crt.2019-03-09@11:22~
'''
from random import randint
import abc
import datetime
import time
import os
import traceback
from distutils.version import LooseVersion
from ansible.module_utils import crypto as crypto_utils
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils._text import to_native, to_bytes, to_text
from ansible.module_utils.compat import ipaddress as compat_ipaddress
from ansible.module_utils.ecs.api import ECSClient, RestOperationException, SessionConfigurationException
MINIMAL_CRYPTOGRAPHY_VERSION = '1.6'
MINIMAL_PYOPENSSL_VERSION = '0.15'
PYOPENSSL_IMP_ERR = None
try:
import OpenSSL
from OpenSSL import crypto
PYOPENSSL_VERSION = LooseVersion(OpenSSL.__version__)
except ImportError:
PYOPENSSL_IMP_ERR = traceback.format_exc()
PYOPENSSL_FOUND = False
else:
PYOPENSSL_FOUND = True
CRYPTOGRAPHY_IMP_ERR = None
try:
import cryptography
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import Encoding
from cryptography.x509 import NameAttribute, Name
from cryptography.x509.oid import NameOID
CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__)
except ImportError:
CRYPTOGRAPHY_IMP_ERR = traceback.format_exc()
CRYPTOGRAPHY_FOUND = False
else:
CRYPTOGRAPHY_FOUND = True
class CertificateError(crypto_utils.OpenSSLObjectError):
pass
class Certificate(crypto_utils.OpenSSLObject):
def __init__(self, module, backend):
super(Certificate, self).__init__(
module.params['path'],
module.params['state'],
module.params['force'],
module.check_mode
)
self.provider = module.params['provider']
self.privatekey_path = module.params['privatekey_path']
self.privatekey_passphrase = module.params['privatekey_passphrase']
self.csr_path = module.params['csr_path']
self.cert = None
self.privatekey = None
self.csr = None
self.backend = backend
self.module = module
# The following are default values which make sure check() works as
# before if providers do not explicitly change these properties.
self.create_subject_key_identifier = 'never_create'
self.create_authority_key_identifier = False
self.backup = module.params['backup']
self.backup_file = None
def get_relative_time_option(self, input_string, input_name):
"""Return an ASN1 formatted string if a relative timespec
or an ASN1 formatted string is provided."""
result = to_native(input_string)
if result is None:
            raise CertificateError(
                'The timespec "%s" for %s is not valid' %
                (input_string, input_name))
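        # Relative time specifications start with "+" or "-"; convert them to an
        # absolute point in time relative to now.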
if result.startswith("+") or result.startswith("-"):
result_datetime = crypto_utils.convert_relative_to_datetime(
result)
if self.backend == 'pyopenssl':
return result_datetime.strftime("%Y%m%d%H%M%SZ")
elif self.backend == 'cryptography':
return result_datetime
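        # Absolute ASN.1 TIME strings: the cryptography backend works with datetime
        # objects, so try the supported timestamp formats; pyopenssl uses the string as-is.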
if self.backend == 'cryptography':
for date_fmt in ['%Y%m%d%H%M%SZ', '%Y%m%d%H%MZ', '%Y%m%d%H%M%S%z', '%Y%m%d%H%M%z']:
try:
return datetime.datetime.strptime(result, date_fmt)
except ValueError:
pass
raise CertificateError(
'The time spec "%s" for %s is invalid' %
(input_string, input_name)
)
return input_string
def _validate_privatekey(self):
if self.backend == 'pyopenssl':
ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD)
ctx.use_privatekey(self.privatekey)
ctx.use_certificate(self.cert)
try:
ctx.check_privatekey()
return True
except OpenSSL.SSL.Error:
return False
elif self.backend == 'cryptography':
return crypto_utils.cryptography_compare_public_keys(self.cert.public_key(), self.privatekey.public_key())
def _validate_csr(self):
if self.backend == 'pyopenssl':
# Verify that CSR is signed by certificate's private key
try:
self.csr.verify(self.cert.get_pubkey())
except OpenSSL.crypto.Error:
return False
# Check subject
if self.csr.get_subject() != self.cert.get_subject():
return False
# Check extensions
csr_extensions = self.csr.get_extensions()
cert_extension_count = self.cert.get_extension_count()
if len(csr_extensions) != cert_extension_count:
return False
for extension_number in range(0, cert_extension_count):
cert_extension = self.cert.get_extension(extension_number)
csr_extension = filter(lambda extension: extension.get_short_name() == cert_extension.get_short_name(), csr_extensions)
if cert_extension.get_data() != list(csr_extension)[0].get_data():
return False
return True
elif self.backend == 'cryptography':
# Verify that CSR is signed by certificate's private key
if not self.csr.is_signature_valid:
return False
if not crypto_utils.cryptography_compare_public_keys(self.csr.public_key(), self.cert.public_key()):
return False
# Check subject
if self.csr.subject != self.cert.subject:
return False
# Check extensions
cert_exts = list(self.cert.extensions)
csr_exts = list(self.csr.extensions)
if self.create_subject_key_identifier != 'never_create':
# Filter out SubjectKeyIdentifier extension before comparison
cert_exts = list(filter(lambda x: not isinstance(x.value, x509.SubjectKeyIdentifier), cert_exts))
csr_exts = list(filter(lambda x: not isinstance(x.value, x509.SubjectKeyIdentifier), csr_exts))
if self.create_authority_key_identifier:
# Filter out AuthorityKeyIdentifier extension before comparison
cert_exts = list(filter(lambda x: not isinstance(x.value, x509.AuthorityKeyIdentifier), cert_exts))
csr_exts = list(filter(lambda x: not isinstance(x.value, x509.AuthorityKeyIdentifier), csr_exts))
if len(cert_exts) != len(csr_exts):
return False
for cert_ext in cert_exts:
try:
csr_ext = self.csr.extensions.get_extension_for_oid(cert_ext.oid)
if cert_ext != csr_ext:
return False
except cryptography.x509.ExtensionNotFound as dummy:
return False
return True
def remove(self, module):
if self.backup:
self.backup_file = module.backup_local(self.path)
super(Certificate, self).remove(module)
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
state_and_perms = super(Certificate, self).check(module, perms_required)
if not state_and_perms:
return False
try:
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
except Exception as dummy:
return False
if self.privatekey_path:
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path,
self.privatekey_passphrase,
backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
raise CertificateError(exc)
if not self._validate_privatekey():
return False
if self.csr_path:
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
if not self._validate_csr():
return False
# Check SubjectKeyIdentifier
if self.backend == 'cryptography' and self.create_subject_key_identifier != 'never_create':
# Get hold of certificate's SKI
try:
ext = self.cert.extensions.get_extension_for_class(x509.SubjectKeyIdentifier)
except cryptography.x509.ExtensionNotFound as dummy:
return False
# Get hold of CSR's SKI for 'create_if_not_provided'
csr_ext = None
if self.create_subject_key_identifier == 'create_if_not_provided':
try:
csr_ext = self.csr.extensions.get_extension_for_class(x509.SubjectKeyIdentifier)
except cryptography.x509.ExtensionNotFound as dummy:
pass
if csr_ext is None:
# If CSR had no SKI, or we chose to ignore it ('always_create'), compare with created SKI
if ext.value.digest != x509.SubjectKeyIdentifier.from_public_key(self.cert.public_key()).digest:
return False
else:
# If CSR had SKI and we didn't ignore it ('create_if_not_provided'), compare SKIs
if ext.value.digest != csr_ext.value.digest:
return False
return True
class CertificateAbsent(Certificate):
def __init__(self, module):
super(CertificateAbsent, self).__init__(module, 'cryptography') # backend doesn't matter
def generate(self, module):
pass
def dump(self, check_mode=False):
# Use only for absent
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
return result
class SelfSignedCertificateCryptography(Certificate):
"""Generate the self-signed certificate, using the cryptography backend"""
def __init__(self, module):
super(SelfSignedCertificateCryptography, self).__init__(module, 'cryptography')
self.create_subject_key_identifier = module.params['selfsigned_create_subject_key_identifier']
self.notBefore = self.get_relative_time_option(module.params['selfsigned_not_before'], 'selfsigned_not_before')
self.notAfter = self.get_relative_time_option(module.params['selfsigned_not_after'], 'selfsigned_not_after')
self.digest = crypto_utils.select_message_digest(module.params['selfsigned_digest'])
self.version = module.params['selfsigned_version']
self.serial_number = x509.random_serial_number()
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file {0} does not exist'.format(self.csr_path)
)
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key file {0} does not exist'.format(self.privatekey_path)
)
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
self._module = module
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path, self.privatekey_passphrase, backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=to_native(exc))
if crypto_utils.cryptography_key_needs_digest_for_signing(self.privatekey):
if self.digest is None:
raise CertificateError(
'The digest %s is not supported with the cryptography backend' % module.params['selfsigned_digest']
)
else:
self.digest = None
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
try:
cert_builder = x509.CertificateBuilder()
cert_builder = cert_builder.subject_name(self.csr.subject)
cert_builder = cert_builder.issuer_name(self.csr.subject)
cert_builder = cert_builder.serial_number(self.serial_number)
cert_builder = cert_builder.not_valid_before(self.notBefore)
cert_builder = cert_builder.not_valid_after(self.notAfter)
cert_builder = cert_builder.public_key(self.privatekey.public_key())
has_ski = False
for extension in self.csr.extensions:
if isinstance(extension.value, x509.SubjectKeyIdentifier):
if self.create_subject_key_identifier == 'always_create':
continue
has_ski = True
cert_builder = cert_builder.add_extension(extension.value, critical=extension.critical)
if not has_ski and self.create_subject_key_identifier != 'never_create':
cert_builder = cert_builder.add_extension(
x509.SubjectKeyIdentifier.from_public_key(self.privatekey.public_key()),
critical=False
)
except ValueError as e:
raise CertificateError(str(e))
try:
certificate = cert_builder.sign(
private_key=self.privatekey, algorithm=self.digest,
backend=default_backend()
)
except TypeError as e:
if str(e) == 'Algorithm must be a registered hash algorithm.' and self.digest is None:
module.fail_json(msg='Signing with Ed25519 and Ed448 keys requires cryptography 2.8 or newer.')
raise
self.cert = certificate
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, certificate.public_bytes(Encoding.PEM))
self.changed = True
else:
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.notAfter.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.not_valid_before.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.cert.not_valid_after.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.cert.serial_number,
})
return result
class SelfSignedCertificate(Certificate):
"""Generate the self-signed certificate."""
def __init__(self, module):
super(SelfSignedCertificate, self).__init__(module, 'pyopenssl')
if module.params['selfsigned_create_subject_key_identifier'] != 'create_if_not_provided':
module.fail_json(msg='selfsigned_create_subject_key_identifier cannot be used with the pyOpenSSL backend!')
self.notBefore = self.get_relative_time_option(module.params['selfsigned_not_before'], 'selfsigned_not_before')
self.notAfter = self.get_relative_time_option(module.params['selfsigned_not_after'], 'selfsigned_not_after')
self.digest = module.params['selfsigned_digest']
self.version = module.params['selfsigned_version']
self.serial_number = randint(1000, 99999)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file {0} does not exist'.format(self.csr_path)
)
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key file {0} does not exist'.format(self.privatekey_path)
)
self.csr = crypto_utils.load_certificate_request(self.csr_path)
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path, self.privatekey_passphrase
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=str(exc))
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
cert = crypto.X509()
cert.set_serial_number(self.serial_number)
cert.set_notBefore(to_bytes(self.notBefore))
cert.set_notAfter(to_bytes(self.notAfter))
cert.set_subject(self.csr.get_subject())
cert.set_issuer(self.csr.get_subject())
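            # pyOpenSSL uses zero-based version numbers, so X.509 v3 is stored as 2.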
cert.set_version(self.version - 1)
cert.set_pubkey(self.csr.get_pubkey())
cert.add_extensions(self.csr.get_extensions())
cert.sign(self.privatekey, self.digest)
self.cert = cert
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, crypto.dump_certificate(crypto.FILETYPE_PEM, self.cert))
self.changed = True
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore,
'notAfter': self.notAfter,
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.get_notBefore(),
'notAfter': self.cert.get_notAfter(),
'serial_number': self.cert.get_serial_number(),
})
return result
class OwnCACertificateCryptography(Certificate):
"""Generate the own CA certificate. Using the cryptography backend"""
def __init__(self, module):
super(OwnCACertificateCryptography, self).__init__(module, 'cryptography')
self.create_subject_key_identifier = module.params['ownca_create_subject_key_identifier']
self.create_authority_key_identifier = module.params['ownca_create_authority_key_identifier']
self.notBefore = self.get_relative_time_option(module.params['ownca_not_before'], 'ownca_not_before')
self.notAfter = self.get_relative_time_option(module.params['ownca_not_after'], 'ownca_not_after')
self.digest = crypto_utils.select_message_digest(module.params['ownca_digest'])
self.version = module.params['ownca_version']
self.serial_number = x509.random_serial_number()
self.ca_cert_path = module.params['ownca_path']
self.ca_privatekey_path = module.params['ownca_privatekey_path']
self.ca_privatekey_passphrase = module.params['ownca_privatekey_passphrase']
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file {0} does not exist'.format(self.csr_path)
)
if not os.path.exists(self.ca_cert_path):
raise CertificateError(
'The CA certificate file {0} does not exist'.format(self.ca_cert_path)
)
if not os.path.exists(self.ca_privatekey_path):
raise CertificateError(
'The CA private key file {0} does not exist'.format(self.ca_privatekey_path)
)
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
self.ca_cert = crypto_utils.load_certificate(self.ca_cert_path, backend=self.backend)
try:
self.ca_private_key = crypto_utils.load_privatekey(
self.ca_privatekey_path, self.ca_privatekey_passphrase, backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=str(exc))
if crypto_utils.cryptography_key_needs_digest_for_signing(self.ca_private_key):
if self.digest is None:
raise CertificateError(
'The digest %s is not supported with the cryptography backend' % module.params['ownca_digest']
)
else:
self.digest = None
def generate(self, module):
if not os.path.exists(self.ca_cert_path):
raise CertificateError(
'The CA certificate %s does not exist' % self.ca_cert_path
)
if not os.path.exists(self.ca_privatekey_path):
raise CertificateError(
'The CA private key %s does not exist' % self.ca_privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
cert_builder = x509.CertificateBuilder()
cert_builder = cert_builder.subject_name(self.csr.subject)
cert_builder = cert_builder.issuer_name(self.ca_cert.subject)
cert_builder = cert_builder.serial_number(self.serial_number)
cert_builder = cert_builder.not_valid_before(self.notBefore)
cert_builder = cert_builder.not_valid_after(self.notAfter)
cert_builder = cert_builder.public_key(self.csr.public_key())
has_ski = False
for extension in self.csr.extensions:
if isinstance(extension.value, x509.SubjectKeyIdentifier):
if self.create_subject_key_identifier == 'always_create':
continue
has_ski = True
if self.create_authority_key_identifier and isinstance(extension.value, x509.AuthorityKeyIdentifier):
continue
cert_builder = cert_builder.add_extension(extension.value, critical=extension.critical)
if not has_ski and self.create_subject_key_identifier != 'never_create':
cert_builder = cert_builder.add_extension(
x509.SubjectKeyIdentifier.from_public_key(self.csr.public_key()),
critical=False
)
if self.create_authority_key_identifier:
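                # Build the Authority Key Identifier from the CA certificate's Subject Key
                # Identifier if it has one; otherwise fall back to the CA's public key.
                # cryptography >= 2.7 expects the extension value, older versions the extension object.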
try:
ext = self.ca_cert.extensions.get_extension_for_class(x509.SubjectKeyIdentifier)
cert_builder = cert_builder.add_extension(
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ext.value)
if CRYPTOGRAPHY_VERSION >= LooseVersion('2.7') else
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ext),
critical=False
)
except cryptography.x509.ExtensionNotFound:
cert_builder = cert_builder.add_extension(
x509.AuthorityKeyIdentifier.from_issuer_public_key(self.ca_cert.public_key()),
critical=False
)
try:
certificate = cert_builder.sign(
private_key=self.ca_private_key, algorithm=self.digest,
backend=default_backend()
)
except TypeError as e:
if str(e) == 'Algorithm must be a registered hash algorithm.' and self.digest is None:
module.fail_json(msg='Signing with Ed25519 and Ed448 keys requires cryptography 2.8 or newer.')
raise
self.cert = certificate
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, certificate.public_bytes(Encoding.PEM))
self.changed = True
else:
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
if not super(OwnCACertificateCryptography, self).check(module, perms_required):
return False
# Check AuthorityKeyIdentifier
if self.create_authority_key_identifier:
try:
ext = self.ca_cert.extensions.get_extension_for_class(x509.SubjectKeyIdentifier)
expected_ext = (
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ext.value)
if CRYPTOGRAPHY_VERSION >= LooseVersion('2.7') else
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ext)
)
except cryptography.x509.ExtensionNotFound:
expected_ext = x509.AuthorityKeyIdentifier.from_issuer_public_key(self.ca_cert.public_key())
try:
ext = self.cert.extensions.get_extension_for_class(x509.AuthorityKeyIdentifier)
if ext.value != expected_ext:
return False
except cryptography.x509.ExtensionNotFound as dummy:
return False
return True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
'ca_cert': self.ca_cert_path,
'ca_privatekey': self.ca_privatekey_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.notAfter.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.not_valid_before.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.cert.not_valid_after.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.cert.serial_number,
})
return result
class OwnCACertificate(Certificate):
"""Generate the own CA certificate."""
def __init__(self, module):
super(OwnCACertificate, self).__init__(module, 'pyopenssl')
self.notBefore = self.get_relative_time_option(module.params['ownca_not_before'], 'ownca_not_before')
self.notAfter = self.get_relative_time_option(module.params['ownca_not_after'], 'ownca_not_after')
self.digest = module.params['ownca_digest']
self.version = module.params['ownca_version']
self.serial_number = randint(1000, 99999)
if module.params['ownca_create_subject_key_identifier'] != 'create_if_not_provided':
module.fail_json(msg='ownca_create_subject_key_identifier cannot be used with the pyOpenSSL backend!')
if module.params['ownca_create_authority_key_identifier']:
module.warn('ownca_create_authority_key_identifier is ignored by the pyOpenSSL backend!')
self.ca_cert_path = module.params['ownca_path']
self.ca_privatekey_path = module.params['ownca_privatekey_path']
self.ca_privatekey_passphrase = module.params['ownca_privatekey_passphrase']
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file {0} does not exist'.format(self.csr_path)
)
if not os.path.exists(self.ca_cert_path):
raise CertificateError(
'The CA certificate file {0} does not exist'.format(self.ca_cert_path)
)
if not os.path.exists(self.ca_privatekey_path):
raise CertificateError(
'The CA private key file {0} does not exist'.format(self.ca_privatekey_path)
)
self.csr = crypto_utils.load_certificate_request(self.csr_path)
self.ca_cert = crypto_utils.load_certificate(self.ca_cert_path)
try:
self.ca_privatekey = crypto_utils.load_privatekey(
self.ca_privatekey_path, self.ca_privatekey_passphrase
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=str(exc))
def generate(self, module):
if not os.path.exists(self.ca_cert_path):
raise CertificateError(
'The CA certificate %s does not exist' % self.ca_cert_path
)
if not os.path.exists(self.ca_privatekey_path):
raise CertificateError(
'The CA private key %s does not exist' % self.ca_privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
cert = crypto.X509()
cert.set_serial_number(self.serial_number)
cert.set_notBefore(to_bytes(self.notBefore))
cert.set_notAfter(to_bytes(self.notAfter))
cert.set_subject(self.csr.get_subject())
cert.set_issuer(self.ca_cert.get_subject())
cert.set_version(self.version - 1)
cert.set_pubkey(self.csr.get_pubkey())
cert.add_extensions(self.csr.get_extensions())
cert.sign(self.ca_privatekey, self.digest)
self.cert = cert
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, crypto.dump_certificate(crypto.FILETYPE_PEM, self.cert))
self.changed = True
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
'ca_cert': self.ca_cert_path,
'ca_privatekey': self.ca_privatekey_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore,
'notAfter': self.notAfter,
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.get_notBefore(),
'notAfter': self.cert.get_notAfter(),
'serial_number': self.cert.get_serial_number(),
})
return result
def compare_sets(subset, superset, equality=False):
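    # With equality=True, both collections must contain exactly the same entries;
    # otherwise every entry of subset only needs to be present in superset.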
if equality:
return set(subset) == set(superset)
else:
return all(x in superset for x in subset)
def compare_dicts(subset, superset, equality=False):
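    # Same idea for dicts: with equality=True the dicts must be equal; otherwise every
    # key/value pair of subset must also be present in superset.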
if equality:
return subset == superset
else:
return all(superset.get(x) == v for x, v in subset.items())
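# Sentinel returned by the assertonly _validate_* helpers when the certificate lacks the
# corresponding extension entirely.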
NO_EXTENSION = 'no extension'
class AssertOnlyCertificateBase(Certificate):
def __init__(self, module, backend):
super(AssertOnlyCertificateBase, self).__init__(module, backend)
self.signature_algorithms = module.params['signature_algorithms']
if module.params['subject']:
self.subject = crypto_utils.parse_name_field(module.params['subject'])
else:
self.subject = []
self.subject_strict = module.params['subject_strict']
if module.params['issuer']:
self.issuer = crypto_utils.parse_name_field(module.params['issuer'])
else:
self.issuer = []
self.issuer_strict = module.params['issuer_strict']
self.has_expired = module.params['has_expired']
self.version = module.params['version']
self.key_usage = module.params['key_usage']
self.key_usage_strict = module.params['key_usage_strict']
self.extended_key_usage = module.params['extended_key_usage']
self.extended_key_usage_strict = module.params['extended_key_usage_strict']
self.subject_alt_name = module.params['subject_alt_name']
self.subject_alt_name_strict = module.params['subject_alt_name_strict']
self.not_before = module.params['not_before']
self.not_after = module.params['not_after']
self.valid_at = module.params['valid_at']
self.invalid_at = module.params['invalid_at']
self.valid_in = module.params['valid_in']
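        # A bare number for valid_in (no leading "+" or "-") is interpreted as a number
        # of seconds from now and normalized to the "+<N>s" timespec form.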
if self.valid_in and not self.valid_in.startswith("+") and not self.valid_in.startswith("-"):
try:
int(self.valid_in)
except ValueError:
module.fail_json(msg='The supplied value for "valid_in" (%s) is not an integer or a valid timespec' % self.valid_in)
self.valid_in = "+" + self.valid_in + "s"
# Load objects
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
if self.privatekey_path is not None:
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path,
self.privatekey_passphrase,
backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
raise CertificateError(exc)
if self.csr_path is not None:
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
@abc.abstractmethod
def _validate_privatekey(self):
pass
@abc.abstractmethod
def _validate_csr_signature(self):
pass
@abc.abstractmethod
def _validate_csr_subject(self):
pass
@abc.abstractmethod
def _validate_csr_extensions(self):
pass
@abc.abstractmethod
def _validate_signature_algorithms(self):
pass
@abc.abstractmethod
def _validate_subject(self):
pass
@abc.abstractmethod
def _validate_issuer(self):
pass
@abc.abstractmethod
def _validate_has_expired(self):
pass
@abc.abstractmethod
def _validate_version(self):
pass
@abc.abstractmethod
def _validate_key_usage(self):
pass
@abc.abstractmethod
def _validate_extended_key_usage(self):
pass
@abc.abstractmethod
def _validate_subject_alt_name(self):
pass
@abc.abstractmethod
def _validate_not_before(self):
pass
@abc.abstractmethod
def _validate_not_after(self):
pass
@abc.abstractmethod
def _validate_valid_at(self):
pass
@abc.abstractmethod
def _validate_invalid_at(self):
pass
@abc.abstractmethod
def _validate_valid_in(self):
pass
def assertonly(self, module):
messages = []
if self.privatekey_path is not None:
if not self._validate_privatekey():
messages.append(
'Certificate %s and private key %s do not match' %
(self.path, self.privatekey_path)
)
if self.csr_path is not None:
if not self._validate_csr_signature():
messages.append(
'Certificate %s and CSR %s do not match: private key mismatch' %
(self.path, self.csr_path)
)
if not self._validate_csr_subject():
messages.append(
'Certificate %s and CSR %s do not match: subject mismatch' %
(self.path, self.csr_path)
)
if not self._validate_csr_extensions():
messages.append(
'Certificate %s and CSR %s do not match: extensions mismatch' %
(self.path, self.csr_path)
)
if self.signature_algorithms is not None:
wrong_alg = self._validate_signature_algorithms()
if wrong_alg:
messages.append(
'Invalid signature algorithm (got %s, expected one of %s)' %
(wrong_alg, self.signature_algorithms)
)
if self.subject is not None:
failure = self._validate_subject()
if failure:
dummy, cert_subject = failure
messages.append(
'Invalid subject component (got %s, expected all of %s to be present)' %
(cert_subject, self.subject)
)
if self.issuer is not None:
failure = self._validate_issuer()
if failure:
dummy, cert_issuer = failure
messages.append(
'Invalid issuer component (got %s, expected all of %s to be present)' % (cert_issuer, self.issuer)
)
if self.has_expired is not None:
cert_expired = self._validate_has_expired()
if cert_expired != self.has_expired:
messages.append(
'Certificate expiration check failed (certificate expiration is %s, expected %s)' %
(cert_expired, self.has_expired)
)
if self.version is not None:
cert_version = self._validate_version()
if cert_version != self.version:
messages.append(
'Invalid certificate version number (got %s, expected %s)' %
(cert_version, self.version)
)
if self.key_usage is not None:
failure = self._validate_key_usage()
if failure == NO_EXTENSION:
messages.append('Found no keyUsage extension')
elif failure:
dummy, cert_key_usage = failure
messages.append(
'Invalid keyUsage components (got %s, expected all of %s to be present)' %
(cert_key_usage, self.key_usage)
)
if self.extended_key_usage is not None:
failure = self._validate_extended_key_usage()
if failure == NO_EXTENSION:
messages.append('Found no extendedKeyUsage extension')
elif failure:
dummy, ext_cert_key_usage = failure
messages.append(
'Invalid extendedKeyUsage component (got %s, expected all of %s to be present)' % (ext_cert_key_usage, self.extended_key_usage)
)
if self.subject_alt_name is not None:
failure = self._validate_subject_alt_name()
if failure == NO_EXTENSION:
messages.append('Found no subjectAltName extension')
elif failure:
dummy, cert_san = failure
messages.append(
'Invalid subjectAltName component (got %s, expected all of %s to be present)' %
(cert_san, self.subject_alt_name)
)
if self.not_before is not None:
cert_not_valid_before = self._validate_not_before()
if cert_not_valid_before != self.get_relative_time_option(self.not_before, 'not_before'):
messages.append(
'Invalid not_before component (got %s, expected %s to be present)' %
(cert_not_valid_before, self.not_before)
)
if self.not_after is not None:
cert_not_valid_after = self._validate_not_after()
if cert_not_valid_after != self.get_relative_time_option(self.not_after, 'not_after'):
messages.append(
'Invalid not_after component (got %s, expected %s to be present)' %
(cert_not_valid_after, self.not_after)
)
if self.valid_at is not None:
not_before, valid_at, not_after = self._validate_valid_at()
if not (not_before <= valid_at <= not_after):
messages.append(
'Certificate is not valid for the specified date (%s) - not_before: %s - not_after: %s' %
(self.valid_at, not_before, not_after)
)
if self.invalid_at is not None:
not_before, invalid_at, not_after = self._validate_invalid_at()
if not_before <= invalid_at <= not_after:
messages.append(
'Certificate is not invalid for the specified date (%s) - not_before: %s - not_after: %s' %
(self.invalid_at, not_before, not_after)
)
if self.valid_in is not None:
not_before, valid_in, not_after = self._validate_valid_in()
if not not_before <= valid_in <= not_after:
messages.append(
'Certificate is not valid in %s from now (that would be %s) - not_before: %s - not_after: %s' %
(self.valid_in, valid_in, not_before, not_after)
)
return messages
def generate(self, module):
"""Don't generate anything - only assert"""
messages = self.assertonly(module)
if messages:
module.fail_json(msg=' | '.join(messages))
def check(self, module, perms_required=False):
"""Ensure the resource is in its desired state."""
messages = self.assertonly(module)
return len(messages) == 0
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
}
return result
class AssertOnlyCertificateCryptography(AssertOnlyCertificateBase):
"""Validate the supplied cert, using the cryptography backend"""
def __init__(self, module):
super(AssertOnlyCertificateCryptography, self).__init__(module, 'cryptography')
def _validate_privatekey(self):
return crypto_utils.cryptography_compare_public_keys(self.cert.public_key(), self.privatekey.public_key())
def _validate_csr_signature(self):
if not self.csr.is_signature_valid:
return False
return crypto_utils.cryptography_compare_public_keys(self.csr.public_key(), self.cert.public_key())
def _validate_csr_subject(self):
return self.csr.subject == self.cert.subject
def _validate_csr_extensions(self):
cert_exts = self.cert.extensions
csr_exts = self.csr.extensions
if len(cert_exts) != len(csr_exts):
return False
for cert_ext in cert_exts:
try:
csr_ext = csr_exts.get_extension_for_oid(cert_ext.oid)
if cert_ext != csr_ext:
return False
except cryptography.x509.ExtensionNotFound as dummy:
return False
return True
def _validate_signature_algorithms(self):
if self.cert.signature_algorithm_oid._name not in self.signature_algorithms:
return self.cert.signature_algorithm_oid._name
def _validate_subject(self):
expected_subject = Name([NameAttribute(oid=crypto_utils.cryptography_name_to_oid(sub[0]), value=to_text(sub[1]))
for sub in self.subject])
cert_subject = self.cert.subject
if not compare_sets(expected_subject, cert_subject, self.subject_strict):
return expected_subject, cert_subject
def _validate_issuer(self):
expected_issuer = Name([NameAttribute(oid=crypto_utils.cryptography_name_to_oid(iss[0]), value=to_text(iss[1]))
for iss in self.issuer])
cert_issuer = self.cert.issuer
if not compare_sets(expected_issuer, cert_issuer, self.issuer_strict):
return self.issuer, cert_issuer
def _validate_has_expired(self):
cert_not_after = self.cert.not_valid_after
cert_expired = cert_not_after < datetime.datetime.utcnow()
return cert_expired
def _validate_version(self):
if self.cert.version == x509.Version.v1:
return 1
if self.cert.version == x509.Version.v3:
return 3
return "unknown"
def _validate_key_usage(self):
try:
current_key_usage = self.cert.extensions.get_extension_for_class(x509.KeyUsage).value
test_key_usage = dict(
digital_signature=current_key_usage.digital_signature,
content_commitment=current_key_usage.content_commitment,
key_encipherment=current_key_usage.key_encipherment,
data_encipherment=current_key_usage.data_encipherment,
key_agreement=current_key_usage.key_agreement,
key_cert_sign=current_key_usage.key_cert_sign,
crl_sign=current_key_usage.crl_sign,
encipher_only=False,
decipher_only=False
)
if test_key_usage['key_agreement']:
test_key_usage.update(dict(
encipher_only=current_key_usage.encipher_only,
decipher_only=current_key_usage.decipher_only
))
key_usages = crypto_utils.cryptography_parse_key_usage_params(self.key_usage)
if not compare_dicts(key_usages, test_key_usage, self.key_usage_strict):
return self.key_usage, [k for k, v in test_key_usage.items() if v is True]
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.key_usage:
return NO_EXTENSION
def _validate_extended_key_usage(self):
try:
current_ext_keyusage = self.cert.extensions.get_extension_for_class(x509.ExtendedKeyUsage).value
usages = [crypto_utils.cryptography_name_to_oid(usage) for usage in self.extended_key_usage]
expected_ext_keyusage = x509.ExtendedKeyUsage(usages)
if not compare_sets(expected_ext_keyusage, current_ext_keyusage, self.extended_key_usage_strict):
return [eku.value for eku in expected_ext_keyusage], [eku.value for eku in current_ext_keyusage]
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.extended_key_usage:
return NO_EXTENSION
def _validate_subject_alt_name(self):
try:
current_san = self.cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
expected_san = [crypto_utils.cryptography_get_name(san) for san in self.subject_alt_name]
if not compare_sets(expected_san, current_san, self.subject_alt_name_strict):
return self.subject_alt_name, current_san
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.subject_alt_name:
return NO_EXTENSION
def _validate_not_before(self):
return self.cert.not_valid_before
def _validate_not_after(self):
return self.cert.not_valid_after
def _validate_valid_at(self):
rt = self.get_relative_time_option(self.valid_at, 'valid_at')
return self.cert.not_valid_before, rt, self.cert.not_valid_after
def _validate_invalid_at(self):
rt = self.get_relative_time_option(self.invalid_at, 'invalid_at')
return self.cert.not_valid_before, rt, self.cert.not_valid_after
def _validate_valid_in(self):
valid_in_date = self.get_relative_time_option(self.valid_in, "valid_in")
return self.cert.not_valid_before, valid_in_date, self.cert.not_valid_after
class AssertOnlyCertificate(AssertOnlyCertificateBase):
"""validate the supplied certificate."""
def __init__(self, module):
super(AssertOnlyCertificate, self).__init__(module, 'pyopenssl')
# Ensure inputs are properly sanitized before comparison.
for param in ['signature_algorithms', 'key_usage', 'extended_key_usage',
'subject_alt_name', 'subject', 'issuer', 'not_before',
'not_after', 'valid_at', 'invalid_at']:
attr = getattr(self, param)
if isinstance(attr, list) and attr:
if isinstance(attr[0], str):
setattr(self, param, [to_bytes(item) for item in attr])
elif isinstance(attr[0], tuple):
setattr(self, param, [(to_bytes(item[0]), to_bytes(item[1])) for item in attr])
elif isinstance(attr, tuple):
setattr(self, param, dict((to_bytes(k), to_bytes(v)) for (k, v) in attr.items()))
elif isinstance(attr, dict):
setattr(self, param, dict((to_bytes(k), to_bytes(v)) for (k, v) in attr.items()))
elif isinstance(attr, str):
setattr(self, param, to_bytes(attr))
def _validate_privatekey(self):
ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD)
ctx.use_privatekey(self.privatekey)
ctx.use_certificate(self.cert)
try:
ctx.check_privatekey()
return True
except OpenSSL.SSL.Error:
return False
def _validate_csr_signature(self):
try:
self.csr.verify(self.cert.get_pubkey())
except OpenSSL.crypto.Error:
return False
def _validate_csr_subject(self):
if self.csr.get_subject() != self.cert.get_subject():
return False
def _validate_csr_extensions(self):
csr_extensions = self.csr.get_extensions()
cert_extension_count = self.cert.get_extension_count()
if len(csr_extensions) != cert_extension_count:
return False
for extension_number in range(0, cert_extension_count):
cert_extension = self.cert.get_extension(extension_number)
csr_extension = filter(lambda extension: extension.get_short_name() == cert_extension.get_short_name(), csr_extensions)
if cert_extension.get_data() != list(csr_extension)[0].get_data():
return False
return True
def _validate_signature_algorithms(self):
if self.cert.get_signature_algorithm() not in self.signature_algorithms:
return self.cert.get_signature_algorithm()
def _validate_subject(self):
expected_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in self.subject]
cert_subject = self.cert.get_subject().get_components()
current_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in cert_subject]
if not compare_sets(expected_subject, current_subject, self.subject_strict):
return expected_subject, current_subject
def _validate_issuer(self):
expected_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in self.issuer]
cert_issuer = self.cert.get_issuer().get_components()
current_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in cert_issuer]
if not compare_sets(expected_issuer, current_issuer, self.issuer_strict):
return self.issuer, cert_issuer
def _validate_has_expired(self):
        # The following 3 lines are the same as the current PyOpenSSL code for cert.has_expired().
        # Older versions of PyOpenSSL have a buggy implementation;
        # to avoid issues with those, we include the code from a more recent release here.
time_string = to_native(self.cert.get_notAfter())
not_after = datetime.datetime.strptime(time_string, "%Y%m%d%H%M%SZ")
cert_expired = not_after < datetime.datetime.utcnow()
return cert_expired
def _validate_version(self):
# Version numbers in certs are off by one:
# v1: 0, v2: 1, v3: 2 ...
return self.cert.get_version() + 1
def _validate_key_usage(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'keyUsage':
found = True
expected_extension = crypto.X509Extension(b"keyUsage", False, b', '.join(self.key_usage))
key_usage = [usage.strip() for usage in to_text(expected_extension, errors='surrogate_or_strict').split(',')]
current_ku = [usage.strip() for usage in to_text(extension, errors='surrogate_or_strict').split(',')]
if not compare_sets(key_usage, current_ku, self.key_usage_strict):
return self.key_usage, str(extension).split(', ')
if not found:
# This is only bad if the user specified a non-empty list
if self.key_usage:
return NO_EXTENSION
def _validate_extended_key_usage(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'extendedKeyUsage':
found = True
extKeyUsage = [OpenSSL._util.lib.OBJ_txt2nid(keyUsage) for keyUsage in self.extended_key_usage]
current_xku = [OpenSSL._util.lib.OBJ_txt2nid(usage.strip()) for usage in
to_bytes(extension, errors='surrogate_or_strict').split(b',')]
if not compare_sets(extKeyUsage, current_xku, self.extended_key_usage_strict):
return self.extended_key_usage, str(extension).split(', ')
if not found:
# This is only bad if the user specified a non-empty list
if self.extended_key_usage:
return NO_EXTENSION
def _normalize_san(self, san):
# Apparently OpenSSL returns 'IP address' not 'IP' as specifier when converting the subjectAltName to string
# although it won't accept this specifier when generating the CSR. (https://github.com/openssl/openssl/issues/4004)
if san.startswith('IP Address:'):
san = 'IP:' + san[len('IP Address:'):]
if san.startswith('IP:'):
ip = compat_ipaddress.ip_address(san[3:])
san = 'IP:{0}'.format(ip.compressed)
return san
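    # For illustration: _normalize_san() maps OpenSSL's display form of an IP SAN back to
    # the compact 'IP:' form used when building CSRs. Example values (placeholders only):
    #     'IP Address:2001:0db8:0000:0000:0000:0000:0000:0001' -> 'IP:2001:db8::1'
    #     'DNS:www.example.com'                                -> returned unchanged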
def _validate_subject_alt_name(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'subjectAltName':
found = True
l_altnames = [self._normalize_san(altname.strip()) for altname in
to_text(extension, errors='surrogate_or_strict').split(', ')]
sans = [self._normalize_san(to_text(san, errors='surrogate_or_strict')) for san in self.subject_alt_name]
if not compare_sets(sans, l_altnames, self.subject_alt_name_strict):
return self.subject_alt_name, l_altnames
if not found:
# This is only bad if the user specified a non-empty list
if self.subject_alt_name:
return NO_EXTENSION
def _validate_not_before(self):
return self.cert.get_notBefore()
def _validate_not_after(self):
return self.cert.get_notAfter()
def _validate_valid_at(self):
rt = self.get_relative_time_option(self.valid_at, "valid_at")
rt = to_bytes(rt, errors='surrogate_or_strict')
return self.cert.get_notBefore(), rt, self.cert.get_notAfter()
def _validate_invalid_at(self):
rt = self.get_relative_time_option(self.invalid_at, "invalid_at")
rt = to_bytes(rt, errors='surrogate_or_strict')
return self.cert.get_notBefore(), rt, self.cert.get_notAfter()
def _validate_valid_in(self):
valid_in_asn1 = self.get_relative_time_option(self.valid_in, "valid_in")
valid_in_date = to_bytes(valid_in_asn1, errors='surrogate_or_strict')
return self.cert.get_notBefore(), valid_in_date, self.cert.get_notAfter()
class EntrustCertificate(Certificate):
"""Retrieve a certificate using Entrust (ECS)."""
def __init__(self, module, backend):
super(EntrustCertificate, self).__init__(module, backend)
self.trackingId = None
self.notAfter = self.get_relative_time_option(module.params['entrust_not_after'], 'entrust_not_after')
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file {0} does not exist'.format(self.csr_path)
)
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
        # The ECS API defaults to using the validated organization tied to the account.
        # We always want to use the organization provided in the CSR instead,
        # so we need to parse the organization out of the CSR.
self.csr_org = None
if self.backend == 'pyopenssl':
csr_subject = self.csr.get_subject()
csr_subject_components = csr_subject.get_components()
for k, v in csr_subject_components:
if k.upper() == 'O':
# Entrust does not support multiple validated organizations in a single certificate
if self.csr_org is not None:
module.fail_json(msg=("Entrust provider does not currently support multiple validated organizations. Multiple organizations found in "
"Subject DN: '{0}'. ".format(csr_subject)))
else:
self.csr_org = v
elif self.backend == 'cryptography':
csr_subject_orgs = self.csr.subject.get_attributes_for_oid(NameOID.ORGANIZATION_NAME)
if len(csr_subject_orgs) == 1:
self.csr_org = csr_subject_orgs[0].value
elif len(csr_subject_orgs) > 1:
module.fail_json(msg=("Entrust provider does not currently support multiple validated organizations. Multiple organizations found in "
"Subject DN: '{0}'. ".format(self.csr.subject)))
# If no organization in the CSR, explicitly tell ECS that it should be blank in issued cert, not defaulted to
# organization tied to the account.
if self.csr_org is None:
self.csr_org = ''
try:
self.ecs_client = ECSClient(
entrust_api_user=module.params.get('entrust_api_user'),
entrust_api_key=module.params.get('entrust_api_key'),
entrust_api_cert=module.params.get('entrust_api_client_cert_path'),
entrust_api_cert_key=module.params.get('entrust_api_client_cert_key_path'),
entrust_api_specification_path=module.params.get('entrust_api_specification_path')
)
except SessionConfigurationException as e:
module.fail_json(msg='Failed to initialize Entrust Provider: {0}'.format(to_native(e.message)))
def generate(self, module):
if not self.check(module, perms_required=False) or self.force:
# Read the CSR that was generated for us
body = {}
with open(self.csr_path, 'r') as csr_file:
body['csr'] = csr_file.read()
body['certType'] = module.params['entrust_cert_type']
            # Handle expiration (defaults to 365 days if not specified)
expiry = self.notAfter
if not expiry:
gmt_now = datetime.datetime.fromtimestamp(time.mktime(time.gmtime()))
expiry = gmt_now + datetime.timedelta(days=365)
expiry_iso3339 = expiry.strftime("%Y-%m-%dT%H:%M:%S.00Z")
body['certExpiryDate'] = expiry_iso3339
body['org'] = self.csr_org
body['tracking'] = {
'requesterName': module.params['entrust_requester_name'],
'requesterEmail': module.params['entrust_requester_email'],
'requesterPhone': module.params['entrust_requester_phone'],
}
try:
result = self.ecs_client.NewCertRequest(Body=body)
self.trackingId = result.get('trackingId')
except RestOperationException as e:
module.fail_json(msg='Failed to request new certificate from Entrust Certificate Services (ECS): {0}'.format(to_native(e.message)))
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, to_bytes(result.get('endEntityCert')))
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
self.changed = True
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
parent_check = super(EntrustCertificate, self).check(module, perms_required)
try:
cert_details = self._get_cert_details()
except RestOperationException as e:
module.fail_json(msg='Failed to get status of existing certificate from Entrust Certificate Services (ECS): {0}.'.format(to_native(e.message)))
# Always issue a new certificate if the certificate is expired, suspended or revoked
status = cert_details.get('status', False)
if status == 'EXPIRED' or status == 'SUSPENDED' or status == 'REVOKED':
return False
# If the requested cert type was specified and it is for a different certificate type than the initial certificate, a new one is needed
if module.params['entrust_cert_type'] and cert_details.get('certType') and module.params['entrust_cert_type'] != cert_details.get('certType'):
return False
return parent_check
def _get_cert_details(self):
cert_details = {}
if self.cert:
serial_number = None
expiry = None
if self.backend == 'pyopenssl':
serial_number = "{0:X}".format(self.cert.get_serial_number())
time_string = to_native(self.cert.get_notAfter())
expiry = datetime.datetime.strptime(time_string, "%Y%m%d%H%M%SZ")
elif self.backend == 'cryptography':
serial_number = "{0:X}".format(self.cert.serial_number)
expiry = self.cert.not_valid_after
# get some information about the expiry of this certificate
expiry_iso3339 = expiry.strftime("%Y-%m-%dT%H:%M:%S.00Z")
cert_details['expiresAfter'] = expiry_iso3339
# If a trackingId is not already defined (from the result of a generate)
# use the serial number to identify the tracking Id
if self.trackingId is None and serial_number is not None:
cert_results = self.ecs_client.GetCertificates(serialNumber=serial_number).get('certificates', {})
                # Finding 0 or more than 1 result is a very unlikely use case; it simply means we cannot perform additional checks
                # on the 'state' as returned by Entrust Certificate Services (ECS). The general certificate validity is
                # still checked as it is in the rest of the module.
if len(cert_results) == 1:
self.trackingId = cert_results[0].get('trackingId')
if self.trackingId is not None:
cert_details.update(self.ecs_client.GetCertificate(trackingId=self.trackingId))
return cert_details
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
}
if self.backup_file:
result['backup_file'] = self.backup_file
result.update(self._get_cert_details())
return result
class AcmeCertificate(Certificate):
"""Retrieve a certificate using the ACME protocol."""
# Since there's no real use of the backend,
# other than the 'self.check' function, we just pass the backend to the constructor
def __init__(self, module, backend):
super(AcmeCertificate, self).__init__(module, backend)
self.accountkey_path = module.params['acme_accountkey_path']
self.challenge_path = module.params['acme_challenge_path']
self.use_chain = module.params['acme_chain']
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not os.path.exists(self.accountkey_path):
raise CertificateError(
'The account key %s does not exist' % self.accountkey_path
)
if not os.path.exists(self.challenge_path):
raise CertificateError(
'The challenge path %s does not exist' % self.challenge_path
)
if not self.check(module, perms_required=False) or self.force:
acme_tiny_path = self.module.get_bin_path('acme-tiny', required=True)
command = [acme_tiny_path]
if self.use_chain:
command.append('--chain')
command.extend(['--account-key', self.accountkey_path])
command.extend(['--csr', self.csr_path])
command.extend(['--acme-dir', self.challenge_path])
try:
crt = module.run_command(command, check_rc=True)[1]
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, to_bytes(crt))
self.changed = True
except OSError as exc:
raise CertificateError(exc)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'accountkey': self.accountkey_path,
'csr': self.csr_path,
}
if self.backup_file:
result['backup_file'] = self.backup_file
return result
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', default='present', choices=['present', 'absent']),
path=dict(type='path', required=True),
provider=dict(type='str', choices=['acme', 'assertonly', 'entrust', 'ownca', 'selfsigned']),
force=dict(type='bool', default=False,),
csr_path=dict(type='path'),
backup=dict(type='bool', default=False),
select_crypto_backend=dict(type='str', default='auto', choices=['auto', 'cryptography', 'pyopenssl']),
# General properties of a certificate
privatekey_path=dict(type='path'),
privatekey_passphrase=dict(type='str', no_log=True),
# provider: assertonly
signature_algorithms=dict(type='list', elements='str', removed_in_version='2.13'),
subject=dict(type='dict', removed_in_version='2.13'),
subject_strict=dict(type='bool', default=False, removed_in_version='2.13'),
issuer=dict(type='dict', removed_in_version='2.13'),
issuer_strict=dict(type='bool', default=False, removed_in_version='2.13'),
has_expired=dict(type='bool', default=False, removed_in_version='2.13'),
version=dict(type='int', removed_in_version='2.13'),
key_usage=dict(type='list', elements='str', aliases=['keyUsage'], removed_in_version='2.13'),
key_usage_strict=dict(type='bool', default=False, aliases=['keyUsage_strict'], removed_in_version='2.13'),
extended_key_usage=dict(type='list', elements='str', aliases=['extendedKeyUsage'], removed_in_version='2.13'),
extended_key_usage_strict=dict(type='bool', default=False, aliases=['extendedKeyUsage_strict'], removed_in_version='2.13'),
subject_alt_name=dict(type='list', elements='str', aliases=['subjectAltName'], removed_in_version='2.13'),
subject_alt_name_strict=dict(type='bool', default=False, aliases=['subjectAltName_strict'], removed_in_version='2.13'),
not_before=dict(type='str', aliases=['notBefore'], removed_in_version='2.13'),
not_after=dict(type='str', aliases=['notAfter'], removed_in_version='2.13'),
valid_at=dict(type='str', removed_in_version='2.13'),
invalid_at=dict(type='str', removed_in_version='2.13'),
valid_in=dict(type='str', removed_in_version='2.13'),
# provider: selfsigned
selfsigned_version=dict(type='int', default=3),
selfsigned_digest=dict(type='str', default='sha256'),
selfsigned_not_before=dict(type='str', default='+0s', aliases=['selfsigned_notBefore']),
selfsigned_not_after=dict(type='str', default='+3650d', aliases=['selfsigned_notAfter']),
selfsigned_create_subject_key_identifier=dict(
type='str',
default='create_if_not_provided',
choices=['create_if_not_provided', 'always_create', 'never_create']
),
# provider: ownca
ownca_path=dict(type='path'),
ownca_privatekey_path=dict(type='path'),
ownca_privatekey_passphrase=dict(type='str', no_log=True),
ownca_digest=dict(type='str', default='sha256'),
ownca_version=dict(type='int', default=3),
ownca_not_before=dict(type='str', default='+0s'),
ownca_not_after=dict(type='str', default='+3650d'),
ownca_create_subject_key_identifier=dict(
type='str',
default='create_if_not_provided',
choices=['create_if_not_provided', 'always_create', 'never_create']
),
ownca_create_authority_key_identifier=dict(type='bool', default=True),
# provider: acme
acme_accountkey_path=dict(type='path'),
acme_challenge_path=dict(type='path'),
acme_chain=dict(type='bool', default=False),
# provider: entrust
entrust_cert_type=dict(type='str', default='STANDARD_SSL',
choices=['STANDARD_SSL', 'ADVANTAGE_SSL', 'UC_SSL', 'EV_SSL', 'WILDCARD_SSL',
'PRIVATE_SSL', 'PD_SSL', 'CDS_ENT_LITE', 'CDS_ENT_PRO', 'SMIME_ENT']),
entrust_requester_email=dict(type='str'),
entrust_requester_name=dict(type='str'),
entrust_requester_phone=dict(type='str'),
entrust_api_user=dict(type='str'),
entrust_api_key=dict(type='str', no_log=True),
entrust_api_client_cert_path=dict(type='path'),
entrust_api_client_cert_key_path=dict(type='path', no_log=True),
entrust_api_specification_path=dict(type='path', default='https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml'),
entrust_not_after=dict(type='str', default='+365d'),
),
supports_check_mode=True,
add_file_common_args=True,
required_if=[
['provider', 'entrust', ['entrust_requester_email', 'entrust_requester_name', 'entrust_requester_phone',
'entrust_api_user', 'entrust_api_key', 'entrust_api_client_cert_path',
'entrust_api_client_cert_key_path']]
]
)
try:
if module.params['state'] == 'absent':
certificate = CertificateAbsent(module)
else:
if module.params['provider'] != 'assertonly' and module.params['csr_path'] is None:
module.fail_json(msg='csr_path is required when provider is not assertonly')
base_dir = os.path.dirname(module.params['path']) or '.'
if not os.path.isdir(base_dir):
module.fail_json(
name=base_dir,
msg='The directory %s does not exist or the file is not a directory' % base_dir
)
provider = module.params['provider']
if provider == 'assertonly':
module.deprecate("The 'assertonly' provider is deprecated; please see the examples of "
"the 'openssl_certificate' module on how to replace it with other modules",
version='2.13')
backend = module.params['select_crypto_backend']
if backend == 'auto':
# Detect what backend we can use
can_use_cryptography = CRYPTOGRAPHY_FOUND and CRYPTOGRAPHY_VERSION >= LooseVersion(MINIMAL_CRYPTOGRAPHY_VERSION)
can_use_pyopenssl = PYOPENSSL_FOUND and PYOPENSSL_VERSION >= LooseVersion(MINIMAL_PYOPENSSL_VERSION)
# If cryptography is available we'll use it
if can_use_cryptography:
backend = 'cryptography'
elif can_use_pyopenssl:
backend = 'pyopenssl'
if module.params['selfsigned_version'] == 2 or module.params['ownca_version'] == 2:
module.warn('crypto backend forced to pyopenssl. The cryptography library does not support v2 certificates')
backend = 'pyopenssl'
# Fail if no backend has been found
if backend == 'auto':
module.fail_json(msg=("Can't detect any of the required Python libraries "
"cryptography (>= {0}) or PyOpenSSL (>= {1})").format(
MINIMAL_CRYPTOGRAPHY_VERSION,
MINIMAL_PYOPENSSL_VERSION))
if backend == 'pyopenssl':
if not PYOPENSSL_FOUND:
module.fail_json(msg=missing_required_lib('pyOpenSSL >= {0}'.format(MINIMAL_PYOPENSSL_VERSION)),
exception=PYOPENSSL_IMP_ERR)
if module.params['provider'] in ['selfsigned', 'ownca', 'assertonly']:
try:
getattr(crypto.X509Req, 'get_extensions')
except AttributeError:
module.fail_json(msg='You need to have PyOpenSSL>=0.15')
module.deprecate('The module is using the PyOpenSSL backend. This backend has been deprecated', version='2.13')
if provider == 'selfsigned':
certificate = SelfSignedCertificate(module)
elif provider == 'acme':
certificate = AcmeCertificate(module, 'pyopenssl')
elif provider == 'ownca':
certificate = OwnCACertificate(module)
elif provider == 'entrust':
certificate = EntrustCertificate(module, 'pyopenssl')
else:
certificate = AssertOnlyCertificate(module)
elif backend == 'cryptography':
if not CRYPTOGRAPHY_FOUND:
module.fail_json(msg=missing_required_lib('cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION)),
exception=CRYPTOGRAPHY_IMP_ERR)
if module.params['selfsigned_version'] == 2 or module.params['ownca_version'] == 2:
module.fail_json(msg='The cryptography backend does not support v2 certificates, '
'use select_crypto_backend=pyopenssl for v2 certificates')
if provider == 'selfsigned':
certificate = SelfSignedCertificateCryptography(module)
elif provider == 'acme':
certificate = AcmeCertificate(module, 'cryptography')
elif provider == 'ownca':
certificate = OwnCACertificateCryptography(module)
elif provider == 'entrust':
certificate = EntrustCertificate(module, 'cryptography')
else:
certificate = AssertOnlyCertificateCryptography(module)
if module.params['state'] == 'present':
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = module.params['force'] or not certificate.check(module)
module.exit_json(**result)
certificate.generate(module)
else:
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = os.path.exists(module.params['path'])
module.exit_json(**result)
certificate.remove(module)
result = certificate.dump()
module.exit_json(**result)
except crypto_utils.OpenSSLObjectError as exc:
module.fail_json(msg=to_native(exc))
if __name__ == "__main__":
main()
| gpl-3.0 | -2,970,852,364,616,998,400 | 2,350,331,141,414,417,000 | 43.548462 | 158 | 0.609906 | false |
fritsvanveen/QGIS | python/plugins/processing/tools/spatialite.py | 3 | 4049 | # -*- coding: utf-8 -*-
"""
***************************************************************************
spatialite.py
---------------------
Date : November 2015
Copyright : (C) 2015 by René-Luc Dhont
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from builtins import str
from builtins import object
__author__ = 'René-Luc Dhont'
__date__ = 'November 2015'
__copyright__ = '(C) 2015, René-Luc Dhont'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import sqlite3 as sqlite
from qgis.utils import spatialite_connect
class DbError(Exception):
def __init__(self, message, query=None):
# Save error. funny that the variables are in utf-8
self.message = str(message, 'utf-8')
self.query = (str(query, 'utf-8') if query is not None else None)
def __str__(self):
return 'MESSAGE: %s\nQUERY: %s' % (self.message, self.query)
class GeoDB(object):
def __init__(self, uri=None):
self.uri = uri
self.dbname = uri.database()
try:
self.con = spatialite_connect(self.con_info())
except (sqlite.InterfaceError, sqlite.OperationalError) as e:
raise DbError(str(e))
self.has_spatialite = self.check_spatialite()
if not self.has_spatialite:
self.has_spatialite = self.init_spatialite()
def con_info(self):
return str(self.dbname)
def init_spatialite(self):
# Get spatialite version
c = self.con.cursor()
try:
self._exec_sql(c, u'SELECT spatialite_version()')
rep = c.fetchall()
v = [int(a) for a in rep[0][0].split('.')]
vv = v[0] * 100000 + v[1] * 1000 + v[2] * 10
# Add spatialite support
if vv >= 401000:
# 4.1 and above
sql = "SELECT initspatialmetadata(1)"
else:
# Under 4.1
sql = "SELECT initspatialmetadata()"
self._exec_sql_and_commit(sql)
except:
return False
finally:
self.con.close()
try:
self.con = spatialite_connect(self.con_info())
except (sqlite.InterfaceError, sqlite.OperationalError) as e:
raise DbError(str(e))
return self.check_spatialite()
def check_spatialite(self):
try:
c = self.con.cursor()
self._exec_sql(c, u"SELECT CheckSpatialMetaData()")
v = c.fetchone()[0]
self.has_geometry_columns = v == 1 or v == 3
self.has_spatialite4 = v == 3
except Exception:
self.has_geometry_columns = False
self.has_spatialite4 = False
self.has_geometry_columns_access = self.has_geometry_columns
return self.has_geometry_columns
def _exec_sql(self, cursor, sql):
try:
cursor.execute(sql)
except (sqlite.Error, sqlite.ProgrammingError, sqlite.Warning, sqlite.InterfaceError, sqlite.OperationalError) as e:
raise DbError(str(e), sql)
def _exec_sql_and_commit(self, sql):
"""Tries to execute and commit some action, on error it rolls
back the change.
"""
try:
c = self.con.cursor()
self._exec_sql(c, sql)
self.con.commit()
except DbError:
self.con.rollback()
raise
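# For illustration only: a minimal usage sketch, assuming the QGIS 3 API where
# QgsDataSourceUri.database() returns the path of the SpatiaLite file (the path below is
# a placeholder, not part of this module):
#
#     from qgis.core import QgsDataSourceUri
#
#     uri = QgsDataSourceUri()
#     uri.setDatabase('/tmp/example.sqlite')
#     db = GeoDB(uri)
#     db._exec_sql_and_commit('CREATE TABLE IF NOT EXISTS demo (id INTEGER PRIMARY KEY)')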
| gpl-2.0 | -307,988,126,468,898,600 | 3,076,892,360,866,282,500 | 31.629032 | 124 | 0.504202 | false |
opencloudinfra/orchestrator | venv/Lib/site-packages/django/templatetags/cache.py | 471 | 3389 | from __future__ import unicode_literals
from django.core.cache import InvalidCacheBackendError, caches
from django.core.cache.utils import make_template_fragment_key
from django.template import (
Library, Node, TemplateSyntaxError, VariableDoesNotExist,
)
register = Library()
class CacheNode(Node):
def __init__(self, nodelist, expire_time_var, fragment_name, vary_on, cache_name):
self.nodelist = nodelist
self.expire_time_var = expire_time_var
self.fragment_name = fragment_name
self.vary_on = vary_on
self.cache_name = cache_name
def render(self, context):
try:
expire_time = self.expire_time_var.resolve(context)
except VariableDoesNotExist:
raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.expire_time_var.var)
try:
expire_time = int(expire_time)
except (ValueError, TypeError):
raise TemplateSyntaxError('"cache" tag got a non-integer timeout value: %r' % expire_time)
if self.cache_name:
try:
cache_name = self.cache_name.resolve(context)
except VariableDoesNotExist:
raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.cache_name.var)
try:
fragment_cache = caches[cache_name]
except InvalidCacheBackendError:
raise TemplateSyntaxError('Invalid cache name specified for cache tag: %r' % cache_name)
else:
try:
fragment_cache = caches['template_fragments']
except InvalidCacheBackendError:
fragment_cache = caches['default']
vary_on = [var.resolve(context) for var in self.vary_on]
cache_key = make_template_fragment_key(self.fragment_name, vary_on)
value = fragment_cache.get(cache_key)
if value is None:
value = self.nodelist.render(context)
fragment_cache.set(cache_key, value, expire_time)
return value
@register.tag('cache')
def do_cache(parser, token):
"""
This will cache the contents of a template fragment for a given amount
of time.
Usage::
{% load cache %}
{% cache [expire_time] [fragment_name] %}
.. some expensive processing ..
{% endcache %}
This tag also supports varying by a list of arguments::
{% load cache %}
{% cache [expire_time] [fragment_name] [var1] [var2] .. %}
.. some expensive processing ..
{% endcache %}
Optionally the cache to use may be specified thus::
{% cache .... using="cachename" %}
Each unique set of arguments will result in a unique cache entry.
"""
nodelist = parser.parse(('endcache',))
parser.delete_first_token()
tokens = token.split_contents()
if len(tokens) < 3:
raise TemplateSyntaxError("'%r' tag requires at least 2 arguments." % tokens[0])
if len(tokens) > 3 and tokens[-1].startswith('using='):
cache_name = parser.compile_filter(tokens[-1][len('using='):])
tokens = tokens[:-1]
else:
cache_name = None
return CacheNode(nodelist,
parser.compile_filter(tokens[1]),
tokens[2], # fragment_name can't be a variable.
[parser.compile_filter(t) for t in tokens[3:]],
cache_name,
)
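# For illustration: because each cached fragment's key is built with
# make_template_fragment_key(fragment_name, vary_on), view code can drop a fragment
# directly. The fragment name and vary-on value below are hypothetical and must match
# what the template passed to {% cache %}.
#
#     from django.core.cache import cache
#     from django.core.cache.utils import make_template_fragment_key
#
#     # pairs with {% cache 500 sidebar request.user.username %} in a template
#     key = make_template_fragment_key('sidebar', [request.user.username])
#     cache.delete(key)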
| gpl-3.0 | 3,914,700,594,701,247,500 | 3,972,698,891,000,512,500 | 35.053191 | 107 | 0.614931 | false |
sernst/cauldron | cauldron/session/display/__init__.py | 1 | 23013 | import json as _json_io
import textwrap
import typing
from datetime import timedelta
import cauldron as _cd
from cauldron import environ
from cauldron import render
from cauldron.render import plots as render_plots
from cauldron.render import texts as render_texts
from cauldron.session import report
def _get_report() -> 'report.Report':
"""Fetches the report associated with the currently running step."""
return _cd.project.get_internal_project().current_step.report
def inspect(source: dict):
"""
Inspects the data and structure of the source dictionary object and
adds the results to the display for viewing.
:param source:
A dictionary object to be inspected.
:return:
"""
r = _get_report()
r.append_body(render.inspect(source))
def header(header_text: str, level: int = 1, expand_full: bool = False):
"""
Adds a text header to the display with the specified level.
:param header_text:
The text to display in the header.
:param level:
The level of the header, which corresponds to the html header
levels, such as <h1>, <h2>, ...
:param expand_full:
Whether or not the header will expand to fill the width of the entire
notebook page, or be constrained by automatic maximum page width. The
default value of False lines the header up with text displays.
"""
r = _get_report()
r.append_body(render.header(
header_text,
level=level,
expand_full=expand_full
))
def text(value: str, preformatted: bool = False):
"""
Adds text to the display. If the text is not preformatted, it will be
displayed in paragraph format. Preformatted text will be displayed
inside a pre tag with a monospace font.
:param value:
The text to display.
:param preformatted:
Whether or not to preserve the whitespace display of the text.
"""
if preformatted:
result = render_texts.preformatted_text(value)
else:
result = render_texts.text(value)
r = _get_report()
r.append_body(result)
r.stdout_interceptor.write_source(
'{}\n'.format(textwrap.dedent(value))
)
def markdown(
source: str = None,
source_path: str = None,
preserve_lines: bool = False,
font_size: float = None,
**kwargs
):
"""
Renders the specified source string or source file using markdown and
adds the resulting HTML to the notebook display.
:param source:
A markdown formatted string.
:param source_path:
A file containing markdown text.
:param preserve_lines:
If True, all line breaks will be treated as hard breaks. Use this
for pre-formatted markdown text where newlines should be retained
during rendering.
:param font_size:
Specifies a relative font size adjustment. The default value is 1.0,
which preserves the inherited font size values. Set it to a value
below 1.0 for smaller font-size rendering and greater than 1.0 for
larger font size rendering.
:param kwargs:
Any variable replacements to make within the string using Jinja2
templating syntax.
"""
r = _get_report()
result = render_texts.markdown(
source=source,
source_path=source_path,
preserve_lines=preserve_lines,
font_size=font_size,
**kwargs
)
r.library_includes += result['library_includes']
r.append_body(result['body'])
r.stdout_interceptor.write_source(
'{}\n'.format(textwrap.dedent(result['rendered']))
)
def json(**kwargs):
"""
    Adds the specified data to the output display window with the
specified key. This allows the user to make available arbitrary
JSON-compatible data to the display for runtime use.
:param kwargs:
Each keyword argument is added to the CD.data object with the
specified key and value.
"""
r = _get_report()
r.append_body(render.json(**kwargs))
r.stdout_interceptor.write_source(
'{}\n'.format(_json_io.dumps(kwargs, indent=2))
)
def plotly(
data: typing.Union[dict, list, typing.Any] = None,
layout: typing.Union[dict, typing.Any] = None,
scale: float = 0.5,
figure: typing.Union[dict, typing.Any] = None,
static: bool = False
):
"""
Creates a Plotly plot in the display with the specified data and
layout.
:param data:
The Plotly trace data to be plotted.
:param layout:
The layout data used for the plot.
:param scale:
The display scale with units of fractional screen height. A value
of 0.5 constrains the output to a maximum height equal to half the
height of browser window when viewed. Values below 1.0 are usually
recommended so the entire output can be viewed without scrolling.
:param figure:
In cases where you need to create a figure instead of separate data
and layout information, you can pass the figure here and leave the
data and layout values as None.
:param static:
If true, the plot will be created without interactivity.
This is useful if you have a lot of plots in your notebook.
"""
r = _get_report()
if not figure and not isinstance(data, (list, tuple)):
data = [data]
if 'plotly' not in r.library_includes:
r.library_includes.append('plotly')
r.append_body(render.plotly(
data=data,
layout=layout,
scale=scale,
figure=figure,
static=static
))
r.stdout_interceptor.write_source('[ADDED] Plotly plot\n')
def table(
data_frame,
scale: float = 0.7,
include_index: bool = False,
max_rows: int = 500,
sample_rows: typing.Optional[int] = None,
formats: typing.Union[
str,
typing.Callable[[typing.Any], str],
typing.Dict[
str,
typing.Union[str, typing.Callable[[typing.Any], str]]
]
] = None
):
"""
Adds the specified data frame to the display in a nicely formatted
scrolling table.
:param data_frame:
The pandas data frame to be rendered to a table.
:param scale:
The display scale with units of fractional screen height. A value
of 0.5 constrains the output to a maximum height equal to half the
height of browser window when viewed. Values below 1.0 are usually
recommended so the entire output can be viewed without scrolling.
:param include_index:
Whether or not the index column should be included in the displayed
output. The index column is not included by default because it is
often unnecessary extra information in the display of the data.
:param max_rows:
This argument exists to prevent accidentally writing very large data
frames to a table, which can cause the notebook display to become
sluggish or unresponsive. If you want to display large tables, you need
only increase the value of this argument.
:param sample_rows:
When set to a positive integer value, the DataFrame will be randomly
sampled to the specified number of rows when displayed in the table.
If the value here is larger than the number of rows in the DataFrame,
the sampling will have no effect and the entire DataFrame will be
displayed instead.
:param formats:
An optional dictionary that, when specified, should contain a mapping
between column names and formatting strings to apply to that column
for display purposes. For example, ``{'foo': '{:,.2f}%'}`` would
transform a column ``foo = [12.2121, 34.987123, 42.72839]`` to
display as ``foo = [12.21%, 34.99%, 42.73%]``. The formatters should
follow the standard Python string formatting guidelines the same as
the ``str.format()`` command having the value of the column as the only
positional argument in the format arguments. A string value can also
be specified for uniform formatting of all columns (or if displaying
a series with only a single value).
"""
r = _get_report()
r.append_body(render.table(
data_frame=data_frame,
scale=scale,
include_index=include_index,
max_rows=max_rows,
sample_rows=sample_rows,
formats=formats
))
r.stdout_interceptor.write_source('[ADDED] Table\n')
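# For illustration only: a minimal sketch of calling table() from a Cauldron step, using
# the per-column formats mapping described above. The DataFrame and format string are
# made up for the example.
#
#     import pandas as pd
#     import cauldron as cd
#
#     df = pd.DataFrame({'name': ['a', 'b'], 'share': [12.2121, 34.987123]})
#     cd.display.table(df, scale=0.4, formats={'share': '{:,.2f}%'})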
def svg(svg_dom: str, filename: str = None):
"""
Adds the specified SVG string to the display. If a filename is
included, the SVG data will also be saved to that filename within the
project results folder.
:param svg_dom:
The SVG string data to add to the display.
:param filename:
An optional filename where the SVG data should be saved within
the project results folder.
"""
r = _get_report()
r.append_body(render.svg(svg_dom))
r.stdout_interceptor.write_source('[ADDED] SVG\n')
if not filename:
return
if not filename.endswith('.svg'):
filename += '.svg'
r.files[filename] = svg_dom
def jinja(path: str, **kwargs):
"""
Renders the specified Jinja2 template to HTML and adds the output to the
display.
:param path:
The fully-qualified path to the template to be rendered.
:param kwargs:
Any keyword arguments that will be use as variable replacements within
the template.
"""
r = _get_report()
r.append_body(render.jinja(path, **kwargs))
r.stdout_interceptor.write_source('[ADDED] Jinja2 rendered HTML\n')
def whitespace(lines: float = 1.0):
"""
Adds the specified number of lines of whitespace.
:param lines:
The number of lines of whitespace to show.
"""
r = _get_report()
r.append_body(render.whitespace(lines))
r.stdout_interceptor.write_source('\n')
def image(
filename: str,
width: int = None,
height: int = None,
justify: str = 'left'
):
"""
Adds an image to the display. The image must be located within the
assets directory of the Cauldron notebook's folder.
:param filename:
Name of the file within the assets directory,
:param width:
Optional width in pixels for the image.
:param height:
Optional height in pixels for the image.
:param justify:
One of 'left', 'center' or 'right', which specifies how the image
is horizontally justified within the notebook display.
"""
r = _get_report()
path = '/'.join(['reports', r.project.uuid, 'latest', 'assets', filename])
r.append_body(render.image(path, width, height, justify))
r.stdout_interceptor.write_source('[ADDED] Image\n')
def html(dom: str):
"""
A string containing a valid HTML snippet.
:param dom:
The HTML string to add to the display.
"""
r = _get_report()
r.append_body(render.html(dom))
r.stdout_interceptor.write_source('[ADDED] HTML\n')
def workspace(show_values: bool = True, show_types: bool = True):
"""
Adds a list of the shared variables currently stored in the project
workspace.
:param show_values:
When true the values for each variable will be shown in addition to
their name.
:param show_types:
When true the data types for each shared variable will be shown in
addition to their name.
"""
r = _get_report()
data = {}
for key, value in r.project.shared.fetch(None).items():
if key.startswith('__cauldron_'):
continue
data[key] = value
r.append_body(render.status(data, values=show_values, types=show_types))
def pyplot(
figure=None,
scale: float = 0.8,
clear: bool = True,
aspect_ratio: typing.Union[list, tuple] = None
):
"""
Creates a matplotlib plot in the display for the specified figure. The size
of the plot is determined automatically to best fit the notebook.
:param figure:
The matplotlib figure to plot. If omitted, the currently active
figure will be used.
:param scale:
The display scale with units of fractional screen height. A value
of 0.5 constrains the output to a maximum height equal to half the
height of browser window when viewed. Values below 1.0 are usually
recommended so the entire output can be viewed without scrolling.
:param clear:
Clears the figure after it has been rendered. This is useful to
prevent persisting old plot data between repeated runs of the
project files. This can be disabled if the plot is going to be
used later in the project files.
:param aspect_ratio:
The aspect ratio for the displayed plot as a two-element list or
tuple. The first element is the width and the second element the
height. The units are "inches," which is an important consideration
for the display of text within the figure. If no aspect ratio is
specified, the currently assigned values to the plot will be used
instead.
"""
r = _get_report()
r.append_body(render_plots.pyplot(
figure,
scale=scale,
clear=clear,
aspect_ratio=aspect_ratio
))
r.stdout_interceptor.write_source('[ADDED] PyPlot plot\n')
def bokeh(model, scale: float = 0.7, responsive: bool = True):
"""
Adds a Bokeh plot object to the notebook display.
:param model:
The plot object to be added to the notebook display.
:param scale:
How tall the plot should be in the notebook as a fraction of screen
height. A number between 0.1 and 1.0. The default value is 0.7.
:param responsive:
Whether or not the plot should responsively scale to fill the width
of the notebook. The default is True.
"""
r = _get_report()
if 'bokeh' not in r.library_includes:
r.library_includes.append('bokeh')
r.append_body(render_plots.bokeh_plot(
model=model,
scale=scale,
responsive=responsive
))
r.stdout_interceptor.write_source('[ADDED] Bokeh plot\n')
def listing(
source: list,
ordered: bool = False,
expand_full: bool = False
):
"""
An unordered or ordered list of the specified *source* iterable where
each element is converted to a string representation for display.
:param source:
The iterable to display as a list.
:param ordered:
Whether or not the list should be ordered. If False, which is the
default, an unordered bulleted list is created.
:param expand_full:
Whether or not the list should expand to fill the screen horizontally.
When defaulted to False, the list is constrained to the center view
area of the screen along with other text. This can be useful to keep
lists aligned with the text flow.
"""
r = _get_report()
r.append_body(render.listing(
source=source,
ordered=ordered,
expand_full=expand_full
))
r.stdout_interceptor.write_source('[ADDED] Listing\n')
def list_grid(
source: list,
expand_full: bool = False,
column_count: int = 2,
row_spacing: float = 1.0
):
"""
An multi-column list of the specified *source* iterable where
each element is converted to a string representation for display.
:param source:
The iterable to display as a list.
:param expand_full:
Whether or not the list should expand to fill the screen horizontally.
When defaulted to False, the list is constrained to the center view
area of the screen along with other text. This can be useful to keep
lists aligned with the text flow.
:param column_count:
The number of columns to display. The specified count is applicable to
high-definition screens. For Lower definition screens the actual count
displayed may be fewer as the layout responds to less available
horizontal screen space.
:param row_spacing:
The number of lines of whitespace to include between each row in the
grid. Set this to 0 for tightly displayed lists.
"""
r = _get_report()
r.append_body(render.list_grid(
source=source,
expand_full=expand_full,
column_count=column_count,
row_spacing=row_spacing
))
r.stdout_interceptor.write_source('[ADDED] List grid\n')
def latex(source: str):
"""
Add a mathematical equation in latex math-mode syntax to the display.
    The @ character is used in place of the traditional backslash escape
    character to prevent backslash conflicts with Python strings. For
    example, \\delta would be @delta.
:param source:
The string representing the latex equation to be rendered.
"""
r = _get_report()
if 'katex' not in r.library_includes:
r.library_includes.append('katex')
r.append_body(render_texts.latex(source.replace('@', '\\')))
r.stdout_interceptor.write_source('[ADDED] Latex equation\n')
def head(source, count: int = 5):
"""
Displays a specified number of elements in a source object of many
different possible types.
:param source:
DataFrames will show *count* rows of that DataFrame. A list, tuple or
other iterable, will show the first *count* rows. Dictionaries will
show *count* keys from the dictionary, which will be randomly selected
unless you are using an OrderedDict. Strings will show the first
*count* characters.
:param count:
The number of elements to show from the source.
"""
r = _get_report()
r.append_body(render_texts.head(source, count=count))
r.stdout_interceptor.write_source('[ADDED] Head\n')
def tail(source, count: int = 5):
"""
The opposite of the head function. Displays the last *count* elements of
the *source* object.
:param source:
DataFrames will show the last *count* rows of that DataFrame. A list,
tuple or other iterable, will show the last *count* rows. Dictionaries
will show *count* keys from the dictionary, which will be randomly
selected unless you are using an OrderedDict. Strings will show the
last *count* characters.
:param count:
The number of elements to show from the source.
"""
r = _get_report()
r.append_body(render_texts.tail(source, count=count))
r.stdout_interceptor.write_source('[ADDED] Tail\n')
def status(
message: str = None,
progress: float = None,
section_message: str = None,
section_progress: float = None,
):
"""
Updates the status display, which is only visible while a step is running.
This is useful for providing feedback and information during long-running
steps.
A section progress is also available for cases where long running tasks
consist of multiple tasks and you want to display sub-progress messages
within the context of the larger status.
Note: this is only supported when running in the Cauldron desktop
application.
:param message:
The status message you want to display. If left blank the previously
set status message will be retained. Should you desire to remove an
existing message, specify a blank string for this argument.
:param progress:
A number between zero and one that indicates the overall progress for
the current status. If no value is specified, the previously assigned
progress will be retained.
:param section_message:
The status message you want to display for a particular task within a
long-running step. If left blank the previously set section message
will be retained. Should you desire to remove an existing message,
specify a blank string for this argument.
:param section_progress:
A number between zero and one that indicates the progress for the
current section status. If no value is specified, the previously
assigned section progress value will be retained.
"""
environ.abort_thread()
r = _get_report()
step = _cd.project.get_internal_project().current_step
changes = 0
has_changed = step.progress_message != message
if message is not None and has_changed:
changes += 1
step.progress_message = message
    has_changed = step.progress != max(0, min(1, progress or 0))
if progress is not None and has_changed:
changes += 1
step.progress = max(0.0, min(1.0, progress))
has_changed = step.sub_progress_message != section_message
if section_message is not None and has_changed:
changes += 1
step.sub_progress_message = section_message
has_changed = step.sub_progress != max(0, min(1, section_progress or 0))
if section_progress is not None and has_changed:
changes += 1
step.sub_progress = section_progress
if changes > 0:
# update the timestamp to inform rendering that a status
# has changed and should be re-rendered into the step.
r.update_last_modified()
def code_block(
code: str = None,
path: str = None,
language_id: str = None,
title: str = None,
caption: str = None
):
"""
Adds a block of syntax highlighted code to the display from either
the supplied code argument, or from the code file specified
by the path argument.
:param code:
A string containing the code to be added to the display
:param path:
A path to a file containing code to be added to the display
:param language_id:
The language identifier that indicates what language should
be used by the syntax highlighter. Valid values are any of the
languages supported by the Pygments highlighter.
:param title:
If specified, the code block will include a title bar with the
value of this argument
:param caption:
If specified, the code block will include a caption box below the code
that contains the value of this argument
"""
environ.abort_thread()
r = _get_report()
r.append_body(render.code_block(
block=code,
path=path,
language=language_id,
title=title,
caption=caption
))
r.stdout_interceptor.write_source('{}\n'.format(code))
def elapsed():
"""
Displays the elapsed time since the step started running.
"""
environ.abort_thread()
step = _cd.project.get_internal_project().current_step
r = _get_report()
r.append_body(render.elapsed_time(step.elapsed_time))
result = '[ELAPSED]: {}\n'.format(timedelta(seconds=step.elapsed_time))
r.stdout_interceptor.write_source(result)
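# For illustration only: these functions are normally invoked from a step file through
# the top-level cauldron package. A hypothetical step body might read:
#
#     import cauldron as cd
#
#     cd.display.header('Results', level=2)
#     cd.display.markdown('Processed **{{ count }}** rows', count=42)
#     cd.display.status('crunching numbers', progress=0.25)
#     cd.display.whitespace(1)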
| mit | -9,176,420,617,878,233,000 | 2,874,806,094,395,931,600 | 33.502249 | 79 | 0.655629 | false |
julienmalard/Tikon | pruebas/test_calibrador/test_spotpy.py | 1 | 1920 | import unittest
from warnings import warn as avisar
import scipy.stats as estad
from pruebas.test_central.rcrs.modelo_calib import generar
from tikon.calibrador.spotpy_ import EMV, RS, BDD, CMEDZ, MC, MLH, CAACAA, CAA, ECBUA, ERP, CMMC, CalibSpotPy
from tikon.ecs.aprioris import APrioriDist
class PruebaSpotPy(unittest.TestCase):
def test_algs(símismo):
for alg in [EMV, RS, BDD, CMEDZ, MC, MLH, CAACAA, CAA, ECBUA, ERP, CMMC]:
with símismo.subTest(alg.__name__):
gen = generar()
modelo = gen['modelo']
exper = gen['exper']
modelo.calibrar('calib', exper, calibrador=alg(), n_iter=30)
valid = modelo.simular('valid', exper, calibs=['calib']).validar()
if valid['ens'] < 0.90:
avisar('Algoritmo {alg} no funcion muy bien.'.format(alg=alg.__name__))
def test_dists(símismo):
dists_aprioris = {
'Normal': estad.norm(1, 2),
'Uniforme': estad.uniform(0, 3),
'LogNormal': estad.lognorm(1, 0, 2),
'Chi2': estad.chi2(1, 0, 2),
'Exponencial': estad.expon(0, 2),
'Gamma': estad.gamma(1, 0, 1),
'Triang': estad.triang(0.5, 0, 2)
}
for nmbre in CalibSpotPy.dists_disp:
dist = dists_aprioris[nmbre]
with símismo.subTest(nmbre):
gen = generar()
modelo = gen['modelo']
exper = gen['exper']
coso = gen['coso']
apr = APrioriDist(dist)
coso.espec_apriori(apr, categ='categ', sub_categ='subcateg', ec='ec', prm='a')
modelo.calibrar('calib', exper, n_iter=30)
coso.borrar_aprioris()
valid = modelo.simular('valid', exper, calibs=['calib']).validar()
símismo.assertGreater(valid['ens'], 0.95)
| agpl-3.0 | -4,639,911,715,147,927,000 | -1,156,453,795,703,731,700 | 40.630435 | 109 | 0.544648 | false |
spreeker/democracygame | external_apps/docutils-snapshot/docutils/parsers/rst/directives/html.py | 61 | 3223 | # $Id: html.py 4667 2006-07-12 21:40:56Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
Directives for typically HTML-specific constructs.
"""
__docformat__ = 'reStructuredText'
import sys
from docutils import nodes, utils
from docutils.parsers.rst import Directive
from docutils.parsers.rst import states
from docutils.transforms import components
class MetaBody(states.SpecializedBody):
class meta(nodes.Special, nodes.PreBibliographic, nodes.Element):
"""HTML-specific "meta" element."""
pass
def field_marker(self, match, context, next_state):
"""Meta element."""
node, blank_finish = self.parsemeta(match)
self.parent += node
return [], next_state, []
def parsemeta(self, match):
name = self.parse_field_marker(match)
indented, indent, line_offset, blank_finish = \
self.state_machine.get_first_known_indented(match.end())
node = self.meta()
pending = nodes.pending(components.Filter,
{'component': 'writer',
'format': 'html',
'nodes': [node]})
node['content'] = ' '.join(indented)
if not indented:
line = self.state_machine.line
msg = self.reporter.info(
'No content for meta tag "%s".' % name,
nodes.literal_block(line, line),
line=self.state_machine.abs_line_number())
return msg, blank_finish
tokens = name.split()
try:
attname, val = utils.extract_name_value(tokens[0])[0]
node[attname.lower()] = val
except utils.NameValueError:
node['name'] = tokens[0]
for token in tokens[1:]:
try:
attname, val = utils.extract_name_value(token)[0]
node[attname.lower()] = val
except utils.NameValueError, detail:
line = self.state_machine.line
msg = self.reporter.error(
'Error parsing meta tag attribute "%s": %s.'
% (token, detail), nodes.literal_block(line, line),
line=self.state_machine.abs_line_number())
return msg, blank_finish
self.document.note_pending(pending)
return pending, blank_finish
class Meta(Directive):
has_content = True
SMkwargs = {'state_classes': (MetaBody,)}
def run(self):
self.assert_has_content()
node = nodes.Element()
new_line_offset, blank_finish = self.state.nested_list_parse(
self.content, self.content_offset, node,
initial_state='MetaBody', blank_finish=1,
state_machine_kwargs=self.SMkwargs)
if (new_line_offset - self.content_offset) != len(self.content):
# incomplete parse of block?
error = self.state_machine.reporter.error(
'Invalid meta directive.',
nodes.literal_block(self.block_text, self.block_text),
line=self.lineno)
node += error
return node.children
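# For illustration: this directive is registered in the directives package under the name
# "meta" and consumes a reStructuredText field list, emitting one HTML <meta> tag per
# field. A typical reST usage (shown here only as an example) is:
#
#     .. meta::
#        :description: The reStructuredText plaintext markup language
#        :keywords lang=en: plaintext, markup language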
| bsd-3-clause | -1,075,451,093,916,529,500 | 5,067,523,411,890,987,000 | 35.625 | 73 | 0.572448 | false |
aaxelb/SHARE | share/harvesters/org_dataone.py | 3 | 1426 | from furl import furl
from lxml import etree
from share.harvest import BaseHarvester
class DataOneHarvester(BaseHarvester):
VERSION = 1
def do_harvest(self, start_date, end_date):
end_date = end_date.format('YYYY-MM-DDT00:00:00', formatter='alternative') + 'Z'
start_date = start_date.format('YYYY-MM-DDT00:00:00', formatter='alternative') + 'Z'
url = furl(self.config.base_url).set(query_params={
'q': 'dateModified:[{} TO {}]'.format(start_date, end_date),
'start': 0,
'rows': 1
}).url
return self.fetch_records(url, start_date, end_date)
def fetch_records(self, url, start_date, end_date):
resp = self.requests.get(url)
doc = etree.XML(resp.content)
total_records = int(doc.xpath("//result/@numFound")[0])
records_processed = 0
while records_processed < total_records:
response = self.requests.get(furl(url).set(query_params={
'q': 'dateModified:[{} TO {}]'.format(start_date, end_date),
'start': records_processed,
'rows': 1000
}).url)
docs = etree.XML(response.content).xpath('//doc')
for doc in docs:
doc_id = doc.xpath("str[@name='id']")[0].text
doc = etree.tostring(doc)
yield (doc_id, doc)
records_processed += len(docs)
| apache-2.0 | -7,929,152,598,949,831,000 | 208,310,824,600,992,830 | 32.162791 | 92 | 0.563815 | false |
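For orientation, a rough sketch of the kind of paged query URL the harvester above assembles. The base URL and dates are invented for the example; only the furl call mirrors the code:

    from furl import furl

    base_url = 'https://cn.dataone.org/cn/v2/query/solr/'   # hypothetical config.base_url
    url = furl(base_url).set(query_params={
        'q': 'dateModified:[2016-01-01T00:00:00Z TO 2016-01-02T00:00:00Z]',
        'start': 0,
        'rows': 1000,
    }).url
    # url now carries the escaped date-range query, ready for requests.get(url)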
shehzaadn/phoenix | bin/sqlline-thin.py | 4 | 6499 | #!/usr/bin/env python
############################################################################
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
############################################################################
import os
import subprocess
import sys
import phoenix_utils
import atexit
import urlparse
import argparse
global childProc
childProc = None
def kill_child():
if childProc is not None:
childProc.terminate()
childProc.kill()
if os.name != 'nt':
os.system("reset")
atexit.register(kill_child)
parser = argparse.ArgumentParser(description='Launches the Apache Phoenix Thin Client.')
# Positional argument "url" is optional
parser.add_argument('url', nargs='?', help='The URL to the Phoenix Query Server.', default='http://localhost:8765')
# Positional argument "sqlfile" is optional
parser.add_argument('sqlfile', nargs='?', help='A file of SQL commands to execute.', default='')
# Avatica wire authentication
parser.add_argument('-a', '--authentication', help='Mechanism for HTTP authentication.', choices=('SPNEGO', 'BASIC', 'DIGEST', 'NONE'), default='')
# Avatica wire serialization
parser.add_argument('-s', '--serialization', help='Serialization type for HTTP API.', choices=('PROTOBUF', 'JSON'), default=None)
# Avatica authentication
parser.add_argument('-au', '--auth-user', help='Username for HTTP authentication.')
parser.add_argument('-ap', '--auth-password', help='Password for HTTP authentication.')
# Common arguments across sqlline.py and sqlline-thin.py
phoenix_utils.common_sqlline_args(parser)
# Parse the args
args=parser.parse_args()
phoenix_utils.setPath()
url = args.url
sqlfile = args.sqlfile
serialization_key = 'phoenix.queryserver.serialization'
def cleanup_url(url):
parsed = urlparse.urlparse(url)
if parsed.scheme == "":
url = "http://" + url
parsed = urlparse.urlparse(url)
if ":" not in parsed.netloc:
url = url + ":8765"
return url
def get_serialization():
default_serialization='PROTOBUF'
env=os.environ.copy()
if os.name == 'posix':
hbase_exec_name = 'hbase'
elif os.name == 'nt':
hbase_exec_name = 'hbase.cmd'
else:
print 'Unknown platform "%s", defaulting to HBase executable of "hbase"' % os.name
hbase_exec_name = 'hbase'
hbase_cmd = phoenix_utils.which(hbase_exec_name)
if hbase_cmd is None:
print 'Failed to find hbase executable on PATH, defaulting serialization to %s.' % default_serialization
return default_serialization
env['HBASE_CONF_DIR'] = phoenix_utils.hbase_conf_dir
proc = subprocess.Popen([hbase_cmd, 'org.apache.hadoop.hbase.util.HBaseConfTool', serialization_key],
env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = proc.communicate()
if proc.returncode != 0:
print 'Failed to extract serialization from hbase-site.xml, defaulting to %s.' % default_serialization
return default_serialization
# Don't expect this to happen, but give a default value just in case
if stdout is None:
return default_serialization
stdout = stdout.strip()
if stdout == 'null':
return default_serialization
return stdout
url = cleanup_url(url)
if sqlfile != "":
sqlfile = "--run=" + sqlfile
colorSetting = args.color
# disable color setting for windows OS
if os.name == 'nt':
colorSetting = "false"
# HBase configuration folder path (where hbase-site.xml reside) for
# HBase/Phoenix client side property override
hbase_config_path = os.getenv('HBASE_CONF_DIR', phoenix_utils.current_dir)
serialization = args.serialization if args.serialization else get_serialization()
java_home = os.getenv('JAVA_HOME')
# load hbase-env.??? to extract JAVA_HOME, HBASE_PID_DIR, HBASE_LOG_DIR
hbase_env_path = None
hbase_env_cmd = None
if os.name == 'posix':
hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.sh')
hbase_env_cmd = ['bash', '-c', 'source %s && env' % hbase_env_path]
elif os.name == 'nt':
hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.cmd')
hbase_env_cmd = ['cmd.exe', '/c', 'call %s & set' % hbase_env_path]
if not hbase_env_path or not hbase_env_cmd:
print >> sys.stderr, "hbase-env file unknown on platform %s" % os.name
sys.exit(-1)
hbase_env = {}
if os.path.isfile(hbase_env_path):
p = subprocess.Popen(hbase_env_cmd, stdout = subprocess.PIPE)
for x in p.stdout:
(k, _, v) = x.partition('=')
hbase_env[k.strip()] = v.strip()
if hbase_env.has_key('JAVA_HOME'):
java_home = hbase_env['JAVA_HOME']
if java_home:
java = os.path.join(java_home, 'bin', 'java')
else:
java = 'java'
jdbc_url = 'jdbc:phoenix:thin:url=' + url + ';serialization=' + serialization
if args.authentication:
jdbc_url += ';authentication=' + args.authentication
if args.auth_user:
jdbc_url += ';avatica_user=' + args.auth_user
if args.auth_password:
jdbc_url += ';avatica_password=' + args.auth_password
java_cmd = java + ' $PHOENIX_OPTS ' + \
' -cp "' + phoenix_utils.hbase_conf_dir + os.pathsep + phoenix_utils.phoenix_thin_client_jar + \
os.pathsep + phoenix_utils.hadoop_conf + os.pathsep + phoenix_utils.hadoop_classpath + '" -Dlog4j.configuration=file:' + \
os.path.join(phoenix_utils.current_dir, "log4j.properties") + \
" org.apache.phoenix.queryserver.client.SqllineWrapper -d org.apache.phoenix.queryserver.client.Driver " + \
' -u "' + jdbc_url + '"' + " -n none -p none " + \
" --color=" + colorSetting + " --fastConnect=" + args.fastconnect + " --verbose=" + args.verbose + \
" --incremental=false --isolation=TRANSACTION_READ_COMMITTED " + sqlfile
exitcode = subprocess.call(java_cmd, shell=True)
sys.exit(exitcode)
| apache-2.0 | -9,168,564,234,540,252,000 | 1,939,058,827,456,551,000 | 37.684524 | 147 | 0.67395 | false |
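A few hedged examples of what cleanup_url above produces for typical inputs (derived by tracing the function, not from project documentation):

    cleanup_url('localhost')                    # -> 'http://localhost:8765'
    cleanup_url('http://pqs.example.com')       # -> 'http://pqs.example.com:8765'
    cleanup_url('http://pqs.example.com:8765')  # returned unchanged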
fanchao01/spider | mini_spider/log.py | 1 | 2513 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
This module provides log initialization helpers (time-rotated file handlers).
"""
import os
import logging
import logging.handlers
_LOG_FORMAT = "%(levelname)s: %(asctime)s: %(filename)s:%(lineno)d * %(thread)d %(message)s"
_LOG_DATEFMT = "%m-%d %H:%M:%S"
def init_log(log_path, level=logging.INFO, when="D", backup=7,
format=_LOG_FORMAT, datefmt=_LOG_DATEFMT):
"""
init_log - initialize log module
Args:
log_path - Log file path prefix.
Log data will go to two files: log_path.log and log_path.log.wf
Any non-exist parent directories will be created automatically
level - msg above the level will be displayed
DEBUG < INFO < WARNING < ERROR < CRITICAL
the default value is logging.INFO
when - how to split the log file by time interval
'S' : Seconds
'M' : Minutes
'H' : Hours
'D' : Days
'W' : Week day
default value: 'D'
format - format of the log
default format:
%(levelname)s: %(asctime)s: %(filename)s:%(lineno)d * %(thread)d %(message)s
INFO: 12-09 18:02:42: log.py:40 * 139814749787872 HELLO WORLD
backup - how many backup file to keep
default value: 7
Raises:
OSError: fail to create log directories
IOError: fail to open log file
"""
formatter = logging.Formatter(format, datefmt)
logger = logging.getLogger()
logger.setLevel(level)
    log_dir = os.path.dirname(log_path)
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)
handler = logging.handlers.TimedRotatingFileHandler(log_path + ".log",
when=when,
backupCount=backup)
handler.setLevel(level)
handler.setFormatter(formatter)
logger.addHandler(handler)
handler = logging.handlers.TimedRotatingFileHandler(log_path + ".log.wf",
when=when,
backupCount=backup)
handler.setLevel(logging.WARNING)
handler.setFormatter(formatter)
logger.addHandler(handler)
if __name__ == '__main__':
init_log('./log')
| gpl-2.0 | -8,984,930,438,302,455,000 | -4,334,983,557,415,561,000 | 34.9 | 98 | 0.524473 | false |
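A minimal usage sketch for init_log above. The import path is an assumption based on the file location; any missing directories in the prefix are created automatically:

    import logging
    from mini_spider import log   # assumed import path

    log.init_log('./log/mini_spider', level=logging.DEBUG)
    logging.info('spider started')          # goes to ./log/mini_spider.log
    logging.warning('fetch failed, retry')  # also copied to ./log/mini_spider.log.wf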
LubyRuffy/pr0bescan | plugins/hostcdn.py | 2 | 1504 | #!/usr/bin/env python
#coding=utf-8
from libs.core.common import logging,runtime
from libs.core.common import print_color
import libs.DNS as DNS
log = logging.getLogger(__name__)
def output(target):
customHeaders = ['x-powered-by-360wzb',
'x-powered-by-anquanbao','x-cache','webluker-edge',
'powered-by-chinacache']
cnames = ['360wzb','incapdns','aqb.so']
target.iscdn = False
print_color('Test CDN for %s'%target.ip, 2)
print_color('Test CDN for %s with HTTP header'%target.f_domain, 2)
if any('cdn' in header for header in target.header):
target.iscdn = True
if not target.iscdn:
flag = set(target.header).intersection(set(customHeaders))
target.iscdn = True if len(flag) else None
if not target.iscdn and target.f_domain:
try:
print_color('Test CDN for %s with CNAME'%target.f_domain, 2)
r = DNS.DnsRequest(target.f_domain, qtype="CNAME",
server=['8.8.8.8'], protocol='tcp', timeout=10)
res = r.req()
if len(res.answers) > 0:
cname = res.answers[0]['data']
                # worth learning: substring match against the known CDN CNAME markers
if any(cname_str in cname for cname_str in cnames):
target.iscdn = True
except:
log.exception('exception')
print_color(__name__+' faild', 0)
if target.iscdn:
print_color(target.iscdn, 1)
print('')
| gpl-2.0 | -4,292,395,338,735,569,000 | 3,961,127,007,505,145,300 | 30.521739 | 72 | 0.562166 | false |
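A rough sketch of exercising output() above with a stub target; the FakeTarget class is purely hypothetical and its attribute names simply mirror what the function reads. With an empty f_domain the CNAME branch is skipped, so nothing beyond the pr0bescan imports is needed:

    class FakeTarget(object):
        ip = '1.2.3.4'
        f_domain = ''                   # empty, so the DNS/CNAME lookup is skipped
        header = ['server', 'x-cache']  # 'x-cache' is one of the customHeaders markers
        iscdn = None

    output(FakeTarget())                # should flag the target as sitting behind a CDN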
cosmoharrigan/opencog | opencog/python/learning/incremental_learner/incremental_learner.py | 34 | 7004 | __author__ = 'raminbarati'
import networkx as nx
from util import switch
from modification import Modification
class Runnable(object):
"""INTERFACE"""
def run(self):
pass
class IncrementalLearner(object):
"""INTERFACE"""
def construct_join_tree(self, graph):
pass
def construct_mpd_tree(self, joinTree, moralisedGraph):
pass
def incremental_compilation(self, modificationList):
pass
# pretty self explanatory
class IncrementalLearnerBase(Runnable, IncrementalLearner):
"""ABSTRACT, INCOMPLETE"""
def __init__(self, old_network):
self._old_network = old_network
self._graph_m = None
self._jt = None
self._jt_mpd = None
self._initialized = False
self._marked = []
def moralize(self,directed_graph):
gm = directed_graph.to_undirected()
for node in directed_graph.nodes_iter():
pres = directed_graph.predecessors(node)
for i in range(0,len(pres),1):
for j in range(i+1,len(pres),1):
gm.add_edge(pres[i],pres[j])
return gm
# input graph should not be directed, use moralize first
def triangulate(self, graph):
# implemented in a child class
pass
def thin_out_graph(self, graph):
# override in a child class if triangulation isn't minimal
return graph
def clique_decomposition(self, graph):
cluster_graph = nx.Graph()
cliques = list(graph.subgraph(c) for c in nx.find_cliques(graph))
while cliques:
clique_i = cliques.pop()
for clique in cliques:
clique_j = clique
shared = set(clique_i).intersection(set(clique_j))
if len(shared) > 0:
cluster_graph.add_edge(clique_i, clique_j, {'label':shared, 'weight':1.0/len(shared)})
j_tree = nx.minimum_spanning_tree(cluster_graph)
return j_tree
def construct_join_tree(self, graph):
graph_m = self.moralize(graph)
graph_t = self.triangulate(graph_m)
graph_min = self.thin_out_graph(graph_t)
jt_min = self.clique_decomposition(graph_min)
return jt_min
def construct_mpd_tree(self, jt_min, graph_m):
def is_complete(nbunch):
sub_g = graph_m.subgraph(nbunch)
n = len(nbunch)
if n == 1:
return True
m = sub_g.size()
if n*(n-1)/2 == m:
return True
return False
def aggregate(node_i,node_j):
union = set(node_i).union(set(node_j))
sub_g = graph_m.subgraph(union)
jt_mpd.add_node(sub_g)
sub_g_n = set(sub_g)
neigh = set(jt_mpd[node_i]).union(jt_mpd[node_j])
for n_i in neigh:
sep = set(n_i).intersection(sub_g_n)
jt_mpd.add_edge(n_i,sub_g, {'label':sep})
jt_mpd.remove_node(node_i)
jt_mpd.remove_node(node_j)
jt_mpd = jt_min.copy()
while True:
nodes = jt_mpd.nodes()
complete = True
for node in nodes:
for neighbor in jt_mpd[node]:
seperator = jt_mpd[neighbor][node]['label']
if not is_complete(seperator):
complete = False
aggregate(neighbor,node)
break
if not complete:
break
if complete:
break
return jt_mpd
def incremental_compilation(self, modificationList):
for modification in modificationList:
L = self.modify_moral_graph(modification)
for case in switch(modification.type):
if case(Modification.ADD_NODE):
self.add_node(modification.data)
break
if case(Modification.REMOVE_NODE):
self.remove_node(modification.data)
break
if case(Modification.ADD_LINK):
self.mark_affected_mps_by_add_link(L)
break
if case(Modification.REMOVE_LINK):
self.mark_affected_mps_by_remove_link(None,None,L)
break
if case():
pass
raise Exception("not implemented")
def modify_moral_graph(self, modification):
L = []
for case in switch(modification.type):
if case(Modification.ADD_NODE):
self._graph_m.add_node(modification.data)
break
if case(Modification.REMOVE_NODE):
self._graph_m.remove_node(modification.data)
break
if case(Modification.ADD_LINK):
pair = set(modification.data)
parents = set(self._old_network.predecessors(modification.data[1]))
nodes = pair.union(parents)
subgraph = self._graph_m.subgraph(nodes)
complement = nx.complement(subgraph)
for edge in complement.edges_iter():
L.append(edge)
break
if case(Modification.REMOVE_LINK):
head = modification.data[1]
tail = modification.data[0]
children_head = set(self._old_network.successors(head))
children_tail = set(self._old_network.successors(tail))
if len(children_tail.intersection(children_head)) <= 0:
                    self._graph_m.remove_edge(*modification.data)
L.append(modification.data)
for parent in self._old_network.predecessors_iter(head):
if parent == tail: continue
children_z_i = set(self._old_network.successors(parent)).intersection(children_tail)
if not len(children_z_i) == 1: continue
if head not in children_z_i: continue
if not self._old_network.has_edge(parent,tail): continue
if self._old_network.has_edge(tail, parent): continue
self._graph_m.remove_edge(tail,parent)
L.append((tail,parent))
break
if case():
raise Exception('Not a defined modification')
return L
def connect(self, clusterTree, cluster_i, cluster_j):
raise Exception("not implemented")
def mark_affected_mps_by_remove_link(self, mps_y, mps_z, linkList):
raise Exception("not implemented")
def remove_node(self, node, mps_x = None, mps_y = None):
raise Exception("not implemented")
def add_node(self, node):
C_x = nx.Graph()
C_x.add_node(node, {'marked':False})
self._jt.add_node(C_x)
self._jt_mpd.add_node(C_x)
def mark_affected_mps_by_add_link(self, linkList):
raise Exception("not implemented") | agpl-3.0 | -5,530,054,051,587,565,000 | -707,273,138,266,422,900 | 34.025 | 106 | 0.540834 | false |
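As a hedged illustration of the join-tree pipeline above (written against the networkx 1.x API this module targets), a tiny subclass can supply the missing triangulate step; identity is enough here because the moralised graph of this small DAG is already chordal:

    import networkx as nx

    class TinyLearner(IncrementalLearnerBase):
        def triangulate(self, graph):
            return graph   # acceptable only for this toy case; real models need a proper triangulation

    dag = nx.DiGraph()
    dag.add_edges_from([('A', 'C'), ('B', 'C'), ('C', 'D')])

    learner = TinyLearner(dag)
    jt = learner.construct_join_tree(dag)   # moralise, triangulate, clique decomposition
    # jt is a tree whose two nodes are the cliques {A, B, C} and {C, D}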
mpasternak/django-interval-field | interval/fields.py | 2 | 5391 | # -*- encoding: utf-8 -*-
from django.db import models
from django.db.models.fields.subclassing import SubfieldBase
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy as _
from datetime import timedelta
import six
from interval.forms import IntervalFormField
day_seconds = 24 * 60 * 60
microseconds = 1000000
def formatError(value):
raise ValueError(
"please use [[DD]D days,]HH:MM:SS[.ms] instead of %r" % value)
def timedelta_topgsqlstring(value):
buf = []
for attr in ['days', 'seconds', 'microseconds']:
v = getattr(value, attr)
if v:
buf.append('%i %s' % (v, attr.upper()))
if not buf:
return '0'
return " ".join(buf)
def timedelta_tobigint(value):
return (
value.days * day_seconds * microseconds
+ value.seconds * microseconds
+ value.microseconds
)
def range_check(value, name, min=None, max=None):
try:
value = int(value)
except (TypeError, ValueError):
raise ValueError("%s is not an integer" % value)
if min is not None:
if value < min:
raise ValueError("%s is less than %s" % (value, min))
if max is not None:
if value > max:
raise ValueError("%s is more than %s" % (value, max))
return value
class IntervalField(six.with_metaclass(SubfieldBase, models.Field)):
"""This is a field, which maps to Python's datetime.timedelta.
For PostgreSQL, its type is INTERVAL - a native interval type.
- http://www.postgresql.org/docs/8.4/static/datatype-datetime.html
For other databases, its type is BIGINT and timedelta value is stored
as number of seconds * 1000000 .
"""
description = _("interval")
def __init__(
self, verbose_name=None, min_value=None, max_value=None, format=None,
*args, **kw):
models.Field.__init__(
self, verbose_name=verbose_name, *args, **kw)
self.min_value = min_value
self.max_value = max_value
self.format = format
if self.min_value is not None and self.max_value is not None:
if self.min_value >= self.max_value:
raise ValueError('min_value >= max_value')
def db_type(self, connection):
if connection.settings_dict['ENGINE'].find('postgresql') >= 0 or \
connection.settings_dict['ENGINE'].find('postgis') >= 0:
return 'INTERVAL'
return 'BIGINT'
def to_python(self, value):
if isinstance(value, timedelta):
# psycopg2 will return a timedelta() for INTERVAL type column
# in database
return value
        if value is None or value == '':
return None
# string forms: in form like "X days, HH:MM:SS.ms" (can be used in
# fixture files)
if isinstance(value, six.string_types) and value.find(":") >= 0:
days = 0
if value.find("days,") >= 0 or value.find("day,") >= 0:
if value.find("days,") >= 0:
days, value = value.split("days,")
else:
days, value = value.split("day,")
value = value.strip()
try:
days = int(days.strip())
except ValueError:
formatError(value)
days = range_check(days, "days", 0)
try:
h, m, s = value.split(":")
except ValueError:
formatError(value)
h = range_check(h, "hours", 0)
m = range_check(m, "minutes", 0, 59)
if s.find(".") >= 0:
s, ms = s.split(".")
else:
ms = "0"
s = range_check(s, "seconds", 0, 59)
l = len(ms)
ms = range_check(ms, "microseconds", 0, microseconds)
ms = ms * (microseconds / (10 ** l))
return timedelta(
days=days, hours=h, minutes=m,
seconds=s, microseconds=ms)
# other database backends:
return timedelta(seconds=float(value) / microseconds)
def get_db_prep_value(self, value, connection, prepared=False):
        if value is None or value == '':
return None
if connection.settings_dict['ENGINE'].find('postgresql') >= 0 or \
connection.settings_dict['ENGINE'].find('postgis') >= 0:
if isinstance(value, six.string_types):
# Can happen, when using south migrations
return value
return timedelta_topgsqlstring(value)
return timedelta_tobigint(value)
def formfield(self, form_class=IntervalFormField, **kwargs):
defaults = {'min_value': self.min_value,
'max_value': self.max_value,
'format': self.format or 'DHMS',
'required': not self.blank,
'label': capfirst(self.verbose_name),
'help_text': self.help_text}
if self.has_default():
defaults['initial'] = self.default
defaults.update(kwargs)
return form_class(**defaults)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^interval\.fields\.IntervalField"])
except ImportError:
pass
| mit | 1,947,934,884,740,996,900 | 2,408,291,114,536,856,600 | 29.630682 | 77 | 0.558894 | false |
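A short, hedged sketch of how the field above is typically declared and filled inside a configured Django project; the Task model is invented for the example, and the string form shown is the fixture format to_python() accepts:

    from datetime import timedelta
    from django.db import models
    from interval.fields import IntervalField

    class Task(models.Model):                      # hypothetical model
        duration = IntervalField(null=True, blank=True)

    t = Task(duration=timedelta(days=2, hours=3))  # stored as INTERVAL, or BIGINT microseconds elsewhere
    # equivalent fixture / string form parsed by to_python(): "2 days, 03:00:00.000000"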
wschwa/Mr-Orange-Sick-Beard | lib/dateutil/zoneinfo/__init__.py | 265 | 2575 | """
Copyright (c) 2003-2005 Gustavo Niemeyer <gustavo@niemeyer.net>
This module offers extensions to the standard python 2.3+
datetime module.
"""
from dateutil.tz import tzfile
from tarfile import TarFile
import os
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
__license__ = "PSF License"
__all__ = ["setcachesize", "gettz", "rebuild"]
CACHE = []
CACHESIZE = 10
class tzfile(tzfile):
def __reduce__(self):
return (gettz, (self._filename,))
def getzoneinfofile():
filenames = os.listdir(os.path.join(os.path.dirname(__file__)))
filenames.sort()
filenames.reverse()
for entry in filenames:
if entry.startswith("zoneinfo") and ".tar." in entry:
return os.path.join(os.path.dirname(__file__), entry)
return None
ZONEINFOFILE = getzoneinfofile()
del getzoneinfofile
def setcachesize(size):
global CACHESIZE, CACHE
CACHESIZE = size
del CACHE[size:]
def gettz(name):
tzinfo = None
if ZONEINFOFILE:
for cachedname, tzinfo in CACHE:
if cachedname == name:
break
else:
tf = TarFile.open(ZONEINFOFILE)
try:
zonefile = tf.extractfile(name)
except KeyError:
tzinfo = None
else:
tzinfo = tzfile(zonefile)
tf.close()
CACHE.insert(0, (name, tzinfo))
del CACHE[CACHESIZE:]
return tzinfo
def rebuild(filename, tag=None, format="gz"):
import tempfile, shutil
tmpdir = tempfile.mkdtemp()
zonedir = os.path.join(tmpdir, "zoneinfo")
moduledir = os.path.dirname(__file__)
if tag: tag = "-"+tag
targetname = "zoneinfo%s.tar.%s" % (tag, format)
try:
tf = TarFile.open(filename)
for name in tf.getnames():
if not (name.endswith(".sh") or
name.endswith(".tab") or
name == "leapseconds"):
tf.extract(name, tmpdir)
filepath = os.path.join(tmpdir, name)
os.system("zic -d %s %s" % (zonedir, filepath))
tf.close()
target = os.path.join(moduledir, targetname)
for entry in os.listdir(moduledir):
if entry.startswith("zoneinfo") and ".tar." in entry:
os.unlink(os.path.join(moduledir, entry))
tf = TarFile.open(target, "w:%s" % format)
for entry in os.listdir(zonedir):
entrypath = os.path.join(zonedir, entry)
tf.add(entrypath, entry)
tf.close()
finally:
shutil.rmtree(tmpdir)
| gpl-3.0 | 7,131,563,273,749,819,000 | 8,830,596,749,849,191,000 | 28.597701 | 67 | 0.580583 | false |
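For reference, a small sketch of the public API above; the zone name is assumed to exist inside the bundled zoneinfo tarball:

    from datetime import datetime
    from dateutil.zoneinfo import gettz, setcachesize

    setcachesize(20)                       # optional: keep more parsed zones cached
    tz = gettz("America/New_York")         # tzfile parsed from the embedded tarball
    dt = datetime(2014, 7, 1, 12, 0, tzinfo=tz)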
stonewell/wxglterm | src/utils/app_config.py | 1 | 1301 | import os
import json
class DictQuery(dict):
def get(self, path, default=None):
try:
return self.__get(path, default)
        except Exception:
            import logging
            logging.exception('get failed')
            return default
def __get(self, path, default=None):
keys = path.split("/")
val = None
for key in keys:
# skip empty keys for // and path start with /
if len(key) == 0:
continue
if val:
if isinstance(val, list):
val = [v.get(key, default)
if v else None for v in val]
else:
val = val.get(key, default)
else:
val = dict.get(self, key, default)
if not val or val == default:
                break
if isinstance(val, list) or isinstance(val, dict):
return json.dumps(val)
return val if val is not None else default
def load_config(config_path):
if not os.path.exists(config_path):
msg = 'unable to find the config file:{}'.format(config_path)
raise ValueError(msg)
with open(config_path) as f:
return DictQuery(json.load(f))
def load_config_from_string(data):
return DictQuery(json.loads(data))
| mit | -1,569,137,811,678,816,500 | -7,526,669,967,914,792,000 | 25.55102 | 69 | 0.520369 | false |
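A quick sketch of the slash-separated lookups DictQuery supports, using an inline JSON string instead of a file:

    cfg = load_config_from_string('{"term": {"font": {"size": 12, "name": "mono"}}}')
    cfg.get('/term/font/size')        # -> 12
    cfg.get('/term/font')             # nested dicts come back JSON-encoded
    cfg.get('/term/color', 'white')   # missing keys fall back to the default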
liveblog/superdesk | server/apps/planning.py | 10 | 1989 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from superdesk.notification import push_notification
from superdesk.resource import Resource
from apps.archive.common import on_create_item
from superdesk.services import BaseService
import superdesk
def init_app(app):
endpoint_name = 'planning'
service = PlanningService(endpoint_name, backend=superdesk.get_backend())
PlanningResource(endpoint_name, app=app, service=service)
class PlanningResource(Resource):
schema = {
'guid': {
'type': 'string',
'unique': True
},
'language': {
'type': 'string'
},
'headline': {
'type': 'string'
},
'slugline': {
'type': 'string'
},
'description_text': {
'type': 'string',
'nullable': True
},
'firstcreated': {
'type': 'datetime'
},
'urgency': {
'type': 'integer'
},
'desk': Resource.rel('desks', True)
}
item_url = 'regex("[\w,.:-]+")'
datasource = {'search_backend': 'elastic'}
resource_methods = ['GET', 'POST']
privileges = {'POST': 'planning', 'PATCH': 'planning'}
class PlanningService(BaseService):
def on_create(self, docs):
on_create_item(docs)
def on_created(self, docs):
push_notification('planning', created=1)
def on_updated(self, updates, original):
push_notification('planning', updated=1)
def on_deleted(self, doc):
push_notification('planning', deleted=1)
superdesk.privilege(name='planning',
label='Planning Management',
description='User can plan and cover.')
| agpl-3.0 | -5,129,543,211,809,853,000 | 8,369,368,256,399,879,000 | 25.878378 | 77 | 0.589744 | false |
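As a hedged sketch of how a document shaped by the schema above might be created through the service layer (the field values are invented, desk_id is assumed to hold the ObjectId of an existing desk, and the surrounding Superdesk app is assumed to be initialised):

    import superdesk

    planning_item = {
        'guid': 'urn:example:planning:1',
        'headline': 'Mayor press conference',
        'slugline': 'mayor-presser',
        'urgency': 3,
        'desk': desk_id,   # hypothetical reference to an existing desk
    }
    superdesk.get_resource_service('planning').post([planning_item])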
jinnykoo/wuyisj | src/oscar/apps/order/south_migrations/0008_auto__add_field_orderdiscount_category.py | 16 | 33182 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'OrderDiscount.category'
db.add_column('order_orderdiscount', 'category',
self.gf('django.db.models.fields.CharField')(default='Basket', max_length=64),
keep_default=False)
def backwards(self, orm):
# Deleting field 'OrderDiscount.category'
db.delete_column('order_orderdiscount', 'category')
models = {
'address.country': {
'Meta': {'ordering': "('-is_highlighted', 'name')", 'object_name': 'Country'},
'is_highlighted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
AUTH_USER_MODEL: {
'Meta': {'object_name': AUTH_USER_MODEL_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'customer.communicationeventtype': {
'Meta': {'object_name': 'CommunicationEventType'},
'category': ('django.db.models.fields.CharField', [], {'default': "u'Order related'", 'max_length': '255'}),
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'email_body_html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_body_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_subject_template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sms_template': ('django.db.models.fields.CharField', [], {'max_length': '170', 'blank': 'True'})
},
'order.billingaddress': {
'Meta': {'object_name': 'BillingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.communicationevent': {
'Meta': {'object_name': 'CommunicationEvent'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['customer.CommunicationEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'communication_events'", 'to': "orm['order.Order']"})
},
'order.line': {
'Meta': {'object_name': 'Line'},
'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': "orm['order.Order']"}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['partner.Partner']"}),
'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'order.lineattribute': {
'Meta': {'object_name': 'LineAttribute'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': "orm['order.Line']"}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_attributes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['catalogue.Option']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'order.lineprice': {
'Meta': {'ordering': "('id',)", 'object_name': 'LinePrice'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'prices'", 'to': "orm['order.Line']"}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_prices'", 'to': "orm['order.Order']"}),
'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'})
},
'order.order': {
'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'},
'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.BillingAddress']", 'null': 'True', 'blank': 'True'}),
'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'guest_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingAddress']", 'null': 'True', 'blank': 'True'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'to': "orm['{0}']".format(AUTH_USER_MODEL)})
},
'order.orderdiscount': {
'Meta': {'object_name': 'OrderDiscount'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'category': ('django.db.models.fields.CharField', [], {'default': "'Basket'", 'max_length': '64'}),
'frequency': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'offer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'offer_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'discounts'", 'to': "orm['order.Order']"}),
'voucher_code': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'voucher_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'order.ordernote': {
'Meta': {'object_name': 'OrderNote'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'note_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': "orm['order.Order']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['{0}']".format(AUTH_USER_MODEL), 'null': 'True'})
},
'order.paymentevent': {
'Meta': {'object_name': 'PaymentEvent'},
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.PaymentEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['order.Line']", 'through': "orm['order.PaymentEventQuantity']", 'symmetrical': 'False'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'to': "orm['order.Order']"})
},
'order.paymenteventquantity': {
'Meta': {'object_name': 'PaymentEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.PaymentEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.paymenteventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'PaymentEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'order.shippingaddress': {
'Meta': {'object_name': 'ShippingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.shippingevent': {
'Meta': {'ordering': "['-date']", 'object_name': 'ShippingEvent'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['order.Line']", 'through': "orm['order.ShippingEventQuantity']", 'symmetrical': 'False'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_events'", 'to': "orm['order.Order']"})
},
'order.shippingeventquantity': {
'Meta': {'object_name': 'ShippingEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.ShippingEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.shippingeventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'ShippingEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'partner.partner': {
'Meta': {'object_name': 'Partner'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['{0}']".format(AUTH_USER_MODEL)})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['order']
| bsd-3-clause | 7,363,975,467,552,943,000 | 2,277,843,703,271,861,500 | 86.783069 | 222 | 0.554276 | false |
Brian-Tomasik/leveraged_investing | TaxRates.py | 1 | 1393 | class TaxRates(object):
"""Investor's tax rates"""
def __init__(self, short_term_cap_gains_rate=.28,
long_term_cap_gains_rate=.15, state_income_tax=.05):
self.__short_term_cap_gains_rate = short_term_cap_gains_rate
self.__long_term_cap_gains_rate = long_term_cap_gains_rate
self.__state_income_tax = state_income_tax
def short_term_cap_gains_rate_plus_state(self):
return self.__short_term_cap_gains_rate + self.__state_income_tax
def long_term_cap_gains_rate_plus_state(self):
return self.__long_term_cap_gains_rate + self.__state_income_tax
def income_tax_rate_plus_state(self):
"""Income tax rate is same as short-term cap-gains rate."""
return self.short_term_cap_gains_rate_plus_state()
@property
def short_term_cap_gains_rate(self):
return self.__short_term_cap_gains_rate
@property
def long_term_cap_gains_rate(self):
return self.__long_term_cap_gains_rate
@property
def state_income_tax(self):
"""Since most states don't distinguish short- vs. long-term
capital gains (see http://www.aaii.com/journal/article/capital-pains-rules-for-capital-losses.touch ,
section 'State Income Taxes'), this can probably just be
one number, without distinguishing short- vs. long-term."""
return self.__state_income_tax | unlicense | 6,313,999,404,816,647,000 | 3,654,217,385,560,893,400 | 40 | 109 | 0.653984 | false |
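A small usage sketch of the class above, with the default rates spelled out:

    rates = TaxRates(short_term_cap_gains_rate=.28,
                     long_term_cap_gains_rate=.15,
                     state_income_tax=.05)
    rates.long_term_cap_gains_rate_plus_state()   # 0.15 + 0.05 = 0.20
    rates.income_tax_rate_plus_state()            # same as short-term: 0.28 + 0.05 = 0.33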
SujaySKumar/django | tests/apps/tests.py | 68 | 16296 | from __future__ import unicode_literals
import os
import warnings
from unittest import skipUnless
from django.apps import AppConfig, apps
from django.apps.registry import Apps
from django.contrib.admin.models import LogEntry
from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured
from django.db import models
from django.test import SimpleTestCase, override_settings
from django.test.utils import extend_sys_path
from django.utils import six
from django.utils._os import upath
from .default_config_app.apps import CustomConfig
from .models import SoAlternative, TotallyNormal, new_apps
# Small list with a variety of cases for tests that iterate on installed apps.
# Intentionally not in alphabetical order to check if the order is preserved.
SOME_INSTALLED_APPS = [
'apps.apps.MyAdmin',
'apps.apps.MyAuth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
SOME_INSTALLED_APPS_NAMES = [
'django.contrib.admin',
'django.contrib.auth',
] + SOME_INSTALLED_APPS[2:]
HERE = os.path.dirname(upath(__file__))
class AppsTests(SimpleTestCase):
def test_singleton_master(self):
"""
Ensures that only one master registry can exist.
"""
with self.assertRaises(RuntimeError):
Apps(installed_apps=None)
def test_ready(self):
"""
Tests the ready property of the master registry.
"""
# The master app registry is always ready when the tests run.
self.assertTrue(apps.ready)
# Non-master app registries are populated in __init__.
self.assertTrue(Apps().ready)
def test_bad_app_config(self):
"""
Tests when INSTALLED_APPS contains an incorrect app config.
"""
with self.assertRaises(ImproperlyConfigured):
with self.settings(INSTALLED_APPS=['apps.apps.BadConfig']):
pass
def test_not_an_app_config(self):
"""
Tests when INSTALLED_APPS contains a class that isn't an app config.
"""
with self.assertRaises(ImproperlyConfigured):
with self.settings(INSTALLED_APPS=['apps.apps.NotAConfig']):
pass
def test_no_such_app(self):
"""
Tests when INSTALLED_APPS contains an app that doesn't exist, either
directly or via an app config.
"""
with self.assertRaises(ImportError):
with self.settings(INSTALLED_APPS=['there is no such app']):
pass
with self.assertRaises(ImportError):
with self.settings(INSTALLED_APPS=['apps.apps.NoSuchApp']):
pass
def test_no_such_app_config(self):
"""
Tests when INSTALLED_APPS contains an entry that doesn't exist.
"""
with self.assertRaises(ImportError):
with self.settings(INSTALLED_APPS=['apps.apps.NoSuchConfig']):
pass
def test_default_app_config(self):
with self.settings(INSTALLED_APPS=['apps.default_config_app']):
config = apps.get_app_config('default_config_app')
self.assertIsInstance(config, CustomConfig)
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_get_app_configs(self):
"""
Tests apps.get_app_configs().
"""
app_configs = apps.get_app_configs()
self.assertListEqual(
[app_config.name for app_config in app_configs],
SOME_INSTALLED_APPS_NAMES)
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_get_app_config(self):
"""
Tests apps.get_app_config().
"""
app_config = apps.get_app_config('admin')
self.assertEqual(app_config.name, 'django.contrib.admin')
app_config = apps.get_app_config('staticfiles')
self.assertEqual(app_config.name, 'django.contrib.staticfiles')
with self.assertRaises(LookupError):
apps.get_app_config('webdesign')
msg = "No installed app with label 'django.contrib.auth'. Did you mean 'myauth'"
with self.assertRaisesMessage(LookupError, msg):
apps.get_app_config('django.contrib.auth')
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_is_installed(self):
"""
Tests apps.is_installed().
"""
self.assertTrue(apps.is_installed('django.contrib.admin'))
self.assertTrue(apps.is_installed('django.contrib.auth'))
self.assertTrue(apps.is_installed('django.contrib.staticfiles'))
self.assertFalse(apps.is_installed('django.contrib.webdesign'))
@override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
def test_get_model(self):
"""
Tests apps.get_model().
"""
self.assertEqual(apps.get_model('admin', 'LogEntry'), LogEntry)
with self.assertRaises(LookupError):
apps.get_model('admin', 'LogExit')
# App label is case-sensitive, Model name is case-insensitive.
self.assertEqual(apps.get_model('admin', 'loGentrY'), LogEntry)
with self.assertRaises(LookupError):
apps.get_model('Admin', 'LogEntry')
# A single argument is accepted.
self.assertEqual(apps.get_model('admin.LogEntry'), LogEntry)
with self.assertRaises(LookupError):
apps.get_model('admin.LogExit')
with self.assertRaises(ValueError):
apps.get_model('admin_LogEntry')
@override_settings(INSTALLED_APPS=['apps.apps.RelabeledAppsConfig'])
def test_relabeling(self):
self.assertEqual(apps.get_app_config('relabeled').name, 'apps')
def test_duplicate_labels(self):
with six.assertRaisesRegex(self, ImproperlyConfigured, "Application labels aren't unique"):
with self.settings(INSTALLED_APPS=['apps.apps.PlainAppsConfig', 'apps']):
pass
def test_duplicate_names(self):
with six.assertRaisesRegex(self, ImproperlyConfigured, "Application names aren't unique"):
with self.settings(INSTALLED_APPS=['apps.apps.RelabeledAppsConfig', 'apps']):
pass
def test_import_exception_is_not_masked(self):
"""
App discovery should preserve stack traces. Regression test for #22920.
"""
with six.assertRaisesRegex(self, ImportError, "Oops"):
with self.settings(INSTALLED_APPS=['import_error_package']):
pass
def test_models_py(self):
"""
Tests that the models in the models.py file were loaded correctly.
"""
self.assertEqual(apps.get_model("apps", "TotallyNormal"), TotallyNormal)
with self.assertRaises(LookupError):
apps.get_model("apps", "SoAlternative")
with self.assertRaises(LookupError):
new_apps.get_model("apps", "TotallyNormal")
self.assertEqual(new_apps.get_model("apps", "SoAlternative"), SoAlternative)
def test_dynamic_load(self):
"""
Makes a new model at runtime and ensures it goes into the right place.
"""
old_models = list(apps.get_app_config("apps").get_models())
# Construct a new model in a new app registry
body = {}
new_apps = Apps(["apps"])
meta_contents = {
'app_label': "apps",
'apps': new_apps,
}
meta = type(str("Meta"), tuple(), meta_contents)
body['Meta'] = meta
body['__module__'] = TotallyNormal.__module__
temp_model = type(str("SouthPonies"), (models.Model,), body)
# Make sure it appeared in the right place!
self.assertListEqual(list(apps.get_app_config("apps").get_models()), old_models)
with self.assertRaises(LookupError):
apps.get_model("apps", "SouthPonies")
self.assertEqual(new_apps.get_model("apps", "SouthPonies"), temp_model)
def test_model_clash(self):
"""
Test for behavior when two models clash in the app registry.
"""
new_apps = Apps(["apps"])
meta_contents = {
'app_label': "apps",
'apps': new_apps,
}
body = {}
body['Meta'] = type(str("Meta"), tuple(), meta_contents)
body['__module__'] = TotallyNormal.__module__
type(str("SouthPonies"), (models.Model,), body)
# When __name__ and __module__ match we assume the module
# was reloaded and issue a warning. This use-case is
# useful for REPL. Refs #23621.
body = {}
body['Meta'] = type(str("Meta"), tuple(), meta_contents)
body['__module__'] = TotallyNormal.__module__
with warnings.catch_warnings(record=True) as w:
type(str("SouthPonies"), (models.Model,), body)
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
self.assertEqual(str(w[-1].message),
"Model 'apps.southponies' was already registered. "
"Reloading models is not advised as it can lead to inconsistencies, "
"most notably with related models.")
# If it doesn't appear to be a reloaded module then we expect
# a RuntimeError.
body = {}
body['Meta'] = type(str("Meta"), tuple(), meta_contents)
body['__module__'] = TotallyNormal.__module__ + '.whatever'
with six.assertRaisesRegex(self, RuntimeError,
"Conflicting 'southponies' models in application 'apps':.*"):
type(str("SouthPonies"), (models.Model,), body)
def test_get_containing_app_config_apps_not_ready(self):
"""
apps.get_containing_app_config() should raise an exception if
apps.apps_ready isn't True.
"""
apps.apps_ready = False
try:
with self.assertRaisesMessage(AppRegistryNotReady, "Apps aren't loaded yet"):
apps.get_containing_app_config('foo')
finally:
apps.apps_ready = True
def test_lazy_model_operation(self):
"""
Tests apps.lazy_model_operation().
"""
model_classes = []
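        # Remember which operations were already pending so the assertions
        # below only look at the ones registered by this test.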
initial_pending = set(apps._pending_operations)
def test_func(*models):
model_classes[:] = models
class LazyA(models.Model):
pass
# Test models appearing twice, and models appearing consecutively
model_keys = [('apps', model_name) for model_name in ['lazya', 'lazyb', 'lazyb', 'lazyc', 'lazya']]
apps.lazy_model_operation(test_func, *model_keys)
        # LazyA shouldn't be waited on since it's already registered,
        # and LazyC shouldn't be waited on until LazyB exists.
self.assertSetEqual(set(apps._pending_operations) - initial_pending, {('apps', 'lazyb')})
# Test that multiple operations can wait on the same model
apps.lazy_model_operation(test_func, ('apps', 'lazyb'))
class LazyB(models.Model):
pass
self.assertListEqual(model_classes, [LazyB])
        # Now we are just waiting on LazyC.
self.assertSetEqual(set(apps._pending_operations) - initial_pending, {('apps', 'lazyc')})
class LazyC(models.Model):
pass
# Everything should be loaded - make sure the callback was executed properly.
self.assertListEqual(model_classes, [LazyA, LazyB, LazyB, LazyC, LazyA])
class Stub(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
class AppConfigTests(SimpleTestCase):
"""Unit tests for AppConfig class."""
def test_path_set_explicitly(self):
"""If subclass sets path as class attr, no module attributes needed."""
class MyAppConfig(AppConfig):
path = 'foo'
ac = MyAppConfig('label', Stub())
self.assertEqual(ac.path, 'foo')
def test_explicit_path_overrides(self):
"""If path set as class attr, overrides __path__ and __file__."""
class MyAppConfig(AppConfig):
path = 'foo'
ac = MyAppConfig('label', Stub(__path__=['a'], __file__='b/__init__.py'))
self.assertEqual(ac.path, 'foo')
def test_dunder_path(self):
"""If single element in __path__, use it (in preference to __file__)."""
ac = AppConfig('label', Stub(__path__=['a'], __file__='b/__init__.py'))
self.assertEqual(ac.path, 'a')
def test_no_dunder_path_fallback_to_dunder_file(self):
"""If there is no __path__ attr, use __file__."""
ac = AppConfig('label', Stub(__file__='b/__init__.py'))
self.assertEqual(ac.path, 'b')
def test_empty_dunder_path_fallback_to_dunder_file(self):
"""If the __path__ attr is empty, use __file__ if set."""
ac = AppConfig('label', Stub(__path__=[], __file__='b/__init__.py'))
self.assertEqual(ac.path, 'b')
def test_multiple_dunder_path_fallback_to_dunder_file(self):
"""If the __path__ attr is length>1, use __file__ if set."""
ac = AppConfig('label', Stub(__path__=['a', 'b'], __file__='c/__init__.py'))
self.assertEqual(ac.path, 'c')
def test_no_dunder_path_or_dunder_file(self):
"""If there is no __path__ or __file__, raise ImproperlyConfigured."""
with self.assertRaises(ImproperlyConfigured):
AppConfig('label', Stub())
def test_empty_dunder_path_no_dunder_file(self):
"""If the __path__ attr is empty and there is no __file__, raise."""
with self.assertRaises(ImproperlyConfigured):
AppConfig('label', Stub(__path__=[]))
def test_multiple_dunder_path_no_dunder_file(self):
"""If the __path__ attr is length>1 and there is no __file__, raise."""
with self.assertRaises(ImproperlyConfigured):
AppConfig('label', Stub(__path__=['a', 'b']))
def test_duplicate_dunder_path_no_dunder_file(self):
"""
If the __path__ attr contains duplicate paths and there is no
        __file__, the duplicates should be deduplicated (#25246).
"""
ac = AppConfig('label', Stub(__path__=['a', 'a']))
self.assertEqual(ac.path, 'a')
@skipUnless(six.PY3, "Namespace packages sans __init__.py were added in Python 3.3")
class NamespacePackageAppTests(SimpleTestCase):
# We need nsapp to be top-level so our multiple-paths tests can add another
    # location for it (if it's inside a normal package with an __init__.py that
# isn't possible). In order to avoid cluttering the already-full tests/ dir
# (which is on sys.path), we add these new entries to sys.path temporarily.
base_location = os.path.join(HERE, 'namespace_package_base')
other_location = os.path.join(HERE, 'namespace_package_other_base')
app_path = os.path.join(base_location, 'nsapp')
def test_single_path(self):
"""
A Py3.3+ namespace package can be an app if it has only one path.
"""
with extend_sys_path(self.base_location):
with self.settings(INSTALLED_APPS=['nsapp']):
app_config = apps.get_app_config('nsapp')
self.assertEqual(app_config.path, upath(self.app_path))
def test_multiple_paths(self):
"""
A Py3.3+ namespace package with multiple locations cannot be an app.
(Because then we wouldn't know where to load its templates, static
assets, etc from.)
"""
# Temporarily add two directories to sys.path that both contain
# components of the "nsapp" package.
with extend_sys_path(self.base_location, self.other_location):
with self.assertRaises(ImproperlyConfigured):
with self.settings(INSTALLED_APPS=['nsapp']):
pass
def test_multiple_paths_explicit_path(self):
"""
Multiple locations are ok only if app-config has explicit path.
"""
# Temporarily add two directories to sys.path that both contain
# components of the "nsapp" package.
with extend_sys_path(self.base_location, self.other_location):
with self.settings(INSTALLED_APPS=['nsapp.apps.NSAppConfig']):
app_config = apps.get_app_config('nsapp')
self.assertEqual(app_config.path, upath(self.app_path))
| bsd-3-clause | -8,702,254,904,637,114,000 | -601,868,650,438,172,800 | 37.892601 | 107 | 0.616838 | false |
grburgess/astromodels | astromodels/tests/test_template_model.py | 2 | 4653 | import pytest
import os
import numpy as np
from astromodels.functions.template_model import TemplateModel, TemplateModelFactory, MissingDataFile
from astromodels.functions.functions import Band, Powerlaw
from astromodels import Model, PointSource, clone_model, load_model
import pickle
__author__ = 'giacomov'
def get_comparison_function():
mo = Band()
mo.K = 1
return mo
@pytest.mark.slow
def test_template_factory_1D():
mo = get_comparison_function()
energies = np.logspace(1, 3, 50)
t = TemplateModelFactory('__test1D', 'A test template', energies, ['alpha'])
alpha_grid = np.linspace(-1.5, 1, 15)
#beta_grid = np.linspace(-3.5, -1.6, 15)
#xp_grid = np.logspace(1, 3, 20)
t.define_parameter_grid('alpha', alpha_grid)
for a in alpha_grid:
mo.alpha = a
mo.beta = -2.5
mo.xp = 300.
t.add_interpolation_data(mo(energies), alpha=a)
print("Data has been prepared")
t.save_data(overwrite=True)
@pytest.mark.slow
def test_template_factory():
mo = get_comparison_function()
energies = np.logspace(1, 3, 50)
t = TemplateModelFactory('__test', 'A test template', energies, ['alpha', 'xp', 'beta'])
alpha_grid = np.linspace(-1.5, 1, 15)
beta_grid = np.linspace(-3.5, -1.6, 15)
xp_grid = np.logspace(1, 3, 20)
t.define_parameter_grid('alpha', alpha_grid)
t.define_parameter_grid('beta', beta_grid)
t.define_parameter_grid('xp', xp_grid)
for a in alpha_grid:
for b in beta_grid:
for xp in xp_grid:
mo.alpha = a
mo.beta = b
mo.xp = xp
t.add_interpolation_data(mo(energies), alpha=a, xp=xp, beta=b)
print("Data has been prepared")
t.save_data(overwrite=True)
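    # Reload the 1D template written by test_template_factory_1D and make
    # sure it can be evaluated.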
tm = TemplateModel('__test1D')
tm(energies)
# This will be run second, so the template will exist
@pytest.mark.slow
def test_template_function():
tm = TemplateModel('__test')
mo = get_comparison_function()
new_alpha_grid = np.linspace(-1.5, 1, 15)
new_beta_grid = np.linspace(-3.5, -1.6, 15)
new_xp_grid = np.logspace(1, 3, 15)
new_energies = np.logspace(1, 3, 40)
tm.K = 1
mo.K = 1
for a in new_alpha_grid:
for b in new_beta_grid:
for xp in new_xp_grid:
mo.alpha = a
mo.beta = b
mo.xp = xp
tm.alpha = a
tm.beta = b
tm.xp = xp
res1 = mo(new_energies)
res2 = tm(new_energies)
deltas = np.abs((res2 - res1) / res1)
idx = deltas > 0.1
if np.any(idx):
raise AssertionError("Interpolation precision @ %s is %s, "
"worse than 10 percent, "
"with parameters %s!" % (new_energies[idx], deltas[idx], [a,b,xp]))
@pytest.mark.slow
def test_input_output():
tm = TemplateModel('__test')
tm.alpha = -0.95
tm.beta = -2.23
fake_source = PointSource("test", ra=0.0, dec=0.0, spectral_shape=tm)
fake_model = Model(fake_source)
clone = clone_model(fake_model)
assert clone.get_number_of_point_sources() == 1
assert tm.data_file == clone.test.spectrum.main.shape.data_file
assert clone.test.spectrum.main.shape.alpha.value == tm.alpha.value
assert clone.test.spectrum.main.shape.beta.value == tm.beta.value
xx = np.linspace(1, 10, 100)
assert np.allclose(clone.test.spectrum.main.shape(xx), fake_model.test.spectrum.main.shape(xx))
# Test pickling
dump = pickle.dumps(clone)
clone2 = pickle.loads(dump)
assert clone2.get_number_of_point_sources() == 1
assert tm.data_file == clone2.test.spectrum.main.shape.data_file
assert np.allclose(clone2.test.spectrum.main.shape(xx), fake_model.test.spectrum.main.shape(xx))
# Test pickling with other functions
new_shape = tm * Powerlaw()
new_shape.index_2 = -2.256
dump2 = pickle.dumps(new_shape)
clone3 = pickle.loads(dump2)
assert clone3.index_2.value == new_shape.index_2.value
# Now save to disk and reload
fake_source2 = PointSource("test", ra=0.0, dec=0.0, spectral_shape=new_shape)
fake_model2 = Model(fake_source2)
fake_model2.save("__test.yml", overwrite=True)
# Now try to reload
reloaded_model = load_model("__test.yml")
assert reloaded_model.get_number_of_point_sources() == 1
assert np.allclose(fake_model2.test.spectrum.main.shape(xx), reloaded_model.test.spectrum.main.shape(xx))
os.remove("__test.yml")
| bsd-3-clause | -3,412,712,558,130,421,000 | -8,497,898,536,231,715,000 | 23.489474 | 109 | 0.601547 | false |
yifanzh/ohmypaw | cogs/streams.py | 5 | 26330 | from discord.ext import commands
from .utils.dataIO import dataIO
from .utils.chat_formatting import escape_mass_mentions
from .utils import checks
from collections import defaultdict
from string import ascii_letters
from random import choice
import discord
import os
import re
import aiohttp
import asyncio
import logging
import json
class StreamsError(Exception):
pass
class StreamNotFound(StreamsError):
pass
class APIError(StreamsError):
pass
class InvalidCredentials(StreamsError):
pass
class OfflineStream(StreamsError):
pass
class Streams:
"""Streams
Alerts for a variety of streaming services"""
def __init__(self, bot):
self.bot = bot
self.twitch_streams = dataIO.load_json("data/streams/twitch.json")
self.hitbox_streams = dataIO.load_json("data/streams/hitbox.json")
self.mixer_streams = dataIO.load_json("data/streams/beam.json")
self.picarto_streams = dataIO.load_json("data/streams/picarto.json")
settings = dataIO.load_json("data/streams/settings.json")
self.settings = defaultdict(dict, settings)
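        # (parser, stream id or name) -> alert messages sent while the stream
        # was live, kept so they can be deleted once it goes offline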
self.messages_cache = defaultdict(list)
@commands.command()
async def hitbox(self, stream: str):
"""Checks if hitbox stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(hitbox\.tv\/)'
stream = re.sub(regex, '', stream)
try:
embed = await self.hitbox_online(stream)
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
else:
await self.bot.say(embed=embed)
@commands.command(pass_context=True)
async def twitch(self, ctx, stream: str):
"""Checks if twitch stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(twitch\.tv\/)'
stream = re.sub(regex, '', stream)
try:
data = await self.fetch_twitch_ids(stream, raise_if_none=True)
embed = await self.twitch_online(data[0]["_id"])
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
except InvalidCredentials:
await self.bot.say("Owner: Client-ID is invalid or not set. "
"See `{}streamset twitchtoken`"
"".format(ctx.prefix))
else:
await self.bot.say(embed=embed)
@commands.command()
async def mixer(self, stream: str):
"""Checks if mixer stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(mixer\.com\/)'
stream = re.sub(regex, '', stream)
try:
embed = await self.mixer_online(stream)
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
else:
await self.bot.say(embed=embed)
@commands.command()
async def picarto(self, stream: str):
"""Checks if picarto stream is online"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(picarto\.tv\/)'
stream = re.sub(regex, '', stream)
try:
embed = await self.picarto_online(stream)
except OfflineStream:
await self.bot.say(stream + " is offline.")
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
except APIError:
await self.bot.say("Error contacting the API.")
else:
await self.bot.say(embed=embed)
@commands.group(pass_context=True, no_pm=True)
@checks.mod_or_permissions(manage_server=True)
async def streamalert(self, ctx):
"""Adds/removes stream alerts from the current channel"""
if ctx.invoked_subcommand is None:
await self.bot.send_cmd_help(ctx)
@streamalert.command(name="twitch", pass_context=True)
async def twitch_alert(self, ctx, stream: str):
"""Adds/removes twitch alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(twitch\.tv\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
data = await self.fetch_twitch_ids(stream, raise_if_none=True)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except InvalidCredentials:
await self.bot.say("Owner: Client-ID is invalid or not set. "
"See `{}streamset twitchtoken`"
"".format(ctx.prefix))
return
enabled = self.enable_or_disable_if_active(self.twitch_streams,
stream,
channel,
_id=data[0]["_id"])
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
@streamalert.command(name="hitbox", pass_context=True)
async def hitbox_alert(self, ctx, stream: str):
"""Adds/removes hitbox alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(hitbox\.tv\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
await self.hitbox_online(stream)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except OfflineStream:
pass
enabled = self.enable_or_disable_if_active(self.hitbox_streams,
stream,
channel)
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams)
@streamalert.command(name="mixer", pass_context=True)
async def mixer_alert(self, ctx, stream: str):
"""Adds/removes mixer alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(mixer\.com\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
await self.mixer_online(stream)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except OfflineStream:
pass
enabled = self.enable_or_disable_if_active(self.mixer_streams,
stream,
channel)
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/beam.json", self.mixer_streams)
@streamalert.command(name="picarto", pass_context=True)
async def picarto_alert(self, ctx, stream: str):
"""Adds/removes picarto alerts from the current channel"""
stream = escape_mass_mentions(stream)
regex = r'^(https?\:\/\/)?(www\.)?(picarto\.tv\/)'
stream = re.sub(regex, '', stream)
channel = ctx.message.channel
try:
await self.picarto_online(stream)
except StreamNotFound:
await self.bot.say("That stream doesn't exist.")
return
except APIError:
await self.bot.say("Error contacting the API.")
return
except OfflineStream:
pass
enabled = self.enable_or_disable_if_active(self.picarto_streams,
stream,
channel)
if enabled:
await self.bot.say("Alert activated. I will notify this channel "
"when {} is live.".format(stream))
else:
await self.bot.say("Alert has been removed from this channel.")
dataIO.save_json("data/streams/picarto.json", self.picarto_streams)
@streamalert.command(name="stop", pass_context=True)
async def stop_alert(self, ctx):
"""Stops all streams alerts in the current channel"""
channel = ctx.message.channel
streams = (
self.hitbox_streams,
self.twitch_streams,
self.mixer_streams,
self.picarto_streams
)
for stream_type in streams:
to_delete = []
for s in stream_type:
if channel.id in s["CHANNELS"]:
s["CHANNELS"].remove(channel.id)
if not s["CHANNELS"]:
to_delete.append(s)
for s in to_delete:
stream_type.remove(s)
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams)
dataIO.save_json("data/streams/beam.json", self.mixer_streams)
dataIO.save_json("data/streams/picarto.json", self.picarto_streams)
await self.bot.say("There will be no more stream alerts in this "
"channel.")
@commands.group(pass_context=True)
async def streamset(self, ctx):
"""Stream settings"""
if ctx.invoked_subcommand is None:
await self.bot.send_cmd_help(ctx)
@streamset.command()
@checks.is_owner()
async def twitchtoken(self, token : str):
"""Sets the Client ID for twitch
To do this, follow these steps:
1. Go to this page: https://dev.twitch.tv/dashboard/apps.
2. Click 'Register Your Application'
3. Enter a name, set the OAuth Redirect URI to 'http://localhost', and
select an Application Category of your choosing.
4. Click 'Register', and on the following page, copy the Client ID.
5. Paste the Client ID into this command. Done!
"""
self.settings["TWITCH_TOKEN"] = token
dataIO.save_json("data/streams/settings.json", self.settings)
await self.bot.say('Twitch Client-ID set.')
@streamset.command(pass_context=True, no_pm=True)
@checks.admin()
async def mention(self, ctx, *, mention_type : str):
"""Sets mentions for stream alerts
Types: everyone, here, none"""
server = ctx.message.server
mention_type = mention_type.lower()
if mention_type in ("everyone", "here"):
self.settings[server.id]["MENTION"] = "@" + mention_type
await self.bot.say("When a stream is online @\u200b{} will be "
"mentioned.".format(mention_type))
elif mention_type == "none":
self.settings[server.id]["MENTION"] = ""
await self.bot.say("Mentions disabled.")
else:
await self.bot.send_cmd_help(ctx)
dataIO.save_json("data/streams/settings.json", self.settings)
@streamset.command(pass_context=True, no_pm=True)
@checks.admin()
async def autodelete(self, ctx):
"""Toggles automatic notification deletion for streams that go offline"""
server = ctx.message.server
settings = self.settings[server.id]
current = settings.get("AUTODELETE", True)
settings["AUTODELETE"] = not current
if settings["AUTODELETE"]:
await self.bot.say("Notifications will be automatically deleted "
"once the stream goes offline.")
else:
await self.bot.say("Notifications won't be deleted anymore.")
dataIO.save_json("data/streams/settings.json", self.settings)
async def hitbox_online(self, stream):
url = "https://api.hitbox.tv/media/live/" + stream
async with aiohttp.get(url) as r:
data = await r.json(encoding='utf-8')
if "livestream" not in data:
raise StreamNotFound()
elif data["livestream"][0]["media_is_live"] == "0":
raise OfflineStream()
elif data["livestream"][0]["media_is_live"] == "1":
return self.hitbox_embed(data)
raise APIError()
async def twitch_online(self, stream):
session = aiohttp.ClientSession()
url = "https://api.twitch.tv/kraken/streams/" + stream
header = {
'Client-ID': self.settings.get("TWITCH_TOKEN", ""),
'Accept': 'application/vnd.twitchtv.v5+json'
}
async with session.get(url, headers=header) as r:
data = await r.json(encoding='utf-8')
await session.close()
if r.status == 200:
if data["stream"] is None:
raise OfflineStream()
return self.twitch_embed(data)
elif r.status == 400:
raise InvalidCredentials()
elif r.status == 404:
raise StreamNotFound()
else:
raise APIError()
async def mixer_online(self, stream):
url = "https://mixer.com/api/v1/channels/" + stream
async with aiohttp.get(url) as r:
data = await r.json(encoding='utf-8')
if r.status == 200:
if data["online"] is True:
return self.mixer_embed(data)
else:
raise OfflineStream()
elif r.status == 404:
raise StreamNotFound()
else:
raise APIError()
async def picarto_online(self, stream):
url = "https://api.picarto.tv/v1/channel/name/" + stream
async with aiohttp.get(url) as r:
data = await r.text(encoding='utf-8')
if r.status == 200:
data = json.loads(data)
if data["online"] is True:
return self.picarto_embed(data)
else:
raise OfflineStream()
elif r.status == 404:
raise StreamNotFound()
else:
raise APIError()
async def fetch_twitch_ids(self, *streams, raise_if_none=False):
def chunks(l):
for i in range(0, len(l), 100):
yield l[i:i + 100]
base_url = "https://api.twitch.tv/kraken/users?login="
header = {
'Client-ID': self.settings.get("TWITCH_TOKEN", ""),
'Accept': 'application/vnd.twitchtv.v5+json'
}
results = []
for streams_list in chunks(streams):
session = aiohttp.ClientSession()
url = base_url + ",".join(streams_list)
async with session.get(url, headers=header) as r:
data = await r.json(encoding='utf-8')
if r.status == 200:
results.extend(data["users"])
elif r.status == 400:
raise InvalidCredentials()
else:
raise APIError()
await session.close()
if not results and raise_if_none:
raise StreamNotFound()
return results
def twitch_embed(self, data):
channel = data["stream"]["channel"]
url = channel["url"]
logo = channel["logo"]
if logo is None:
logo = "https://static-cdn.jtvnw.net/jtv_user_pictures/xarth/404_user_70x70.png"
status = channel["status"]
if not status:
status = "Untitled broadcast"
embed = discord.Embed(title=status, url=url)
embed.set_author(name=channel["display_name"])
embed.add_field(name="Followers", value=channel["followers"])
embed.add_field(name="Total views", value=channel["views"])
embed.set_thumbnail(url=logo)
if data["stream"]["preview"]["medium"]:
embed.set_image(url=data["stream"]["preview"]["medium"] + self.rnd_attr())
if channel["game"]:
embed.set_footer(text="Playing: " + channel["game"])
embed.color = 0x6441A4
return embed
def hitbox_embed(self, data):
base_url = "https://edge.sf.hitbox.tv"
livestream = data["livestream"][0]
channel = livestream["channel"]
url = channel["channel_link"]
embed = discord.Embed(title=livestream["media_status"], url=url)
embed.set_author(name=livestream["media_name"])
embed.add_field(name="Followers", value=channel["followers"])
#embed.add_field(name="Views", value=channel["views"])
embed.set_thumbnail(url=base_url + channel["user_logo"])
if livestream["media_thumbnail"]:
embed.set_image(url=base_url + livestream["media_thumbnail"] + self.rnd_attr())
embed.set_footer(text="Playing: " + livestream["category_name"])
embed.color = 0x98CB00
return embed
def mixer_embed(self, data):
default_avatar = ("https://mixer.com/_latest/assets/images/main/"
"avatars/default.jpg")
user = data["user"]
url = "https://mixer.com/" + data["token"]
embed = discord.Embed(title=data["name"], url=url)
embed.set_author(name=user["username"])
embed.add_field(name="Followers", value=data["numFollowers"])
embed.add_field(name="Total views", value=data["viewersTotal"])
if user["avatarUrl"]:
embed.set_thumbnail(url=user["avatarUrl"])
else:
embed.set_thumbnail(url=default_avatar)
if data["thumbnail"]:
embed.set_image(url=data["thumbnail"]["url"] + self.rnd_attr())
embed.color = 0x4C90F3
if data["type"] is not None:
embed.set_footer(text="Playing: " + data["type"]["name"])
return embed
def picarto_embed(self, data):
avatar = ("https://picarto.tv/user_data/usrimg/{}/dsdefault.jpg{}"
"".format(data["name"].lower(), self.rnd_attr()))
url = "https://picarto.tv/" + data["name"]
thumbnail = data["thumbnails"]["web"]
embed = discord.Embed(title=data["title"], url=url)
embed.set_author(name=data["name"])
embed.set_image(url=thumbnail + self.rnd_attr())
embed.add_field(name="Followers", value=data["followers"])
embed.add_field(name="Total views", value=data["viewers_total"])
embed.set_thumbnail(url=avatar)
embed.color = 0x132332
data["tags"] = ", ".join(data["tags"])
if not data["tags"]:
data["tags"] = "None"
if data["adult"]:
data["adult"] = "NSFW | "
else:
data["adult"] = ""
embed.color = 0x4C90F3
embed.set_footer(text="{adult}Category: {category} | Tags: {tags}"
"".format(**data))
return embed
def enable_or_disable_if_active(self, streams, stream, channel, _id=None):
"""Returns True if enabled or False if disabled"""
for i, s in enumerate(streams):
stream_id = s.get("ID")
if stream_id and _id: # ID is available, matching by ID is
if stream_id != _id: # preferable
continue
else: # ID unavailable, matching by name
if s["NAME"] != stream:
continue
if channel.id in s["CHANNELS"]:
streams[i]["CHANNELS"].remove(channel.id)
if not s["CHANNELS"]:
streams.remove(s)
return False
else:
streams[i]["CHANNELS"].append(channel.id)
return True
data = {"CHANNELS": [channel.id],
"NAME": stream,
"ALREADY_ONLINE": False}
if _id:
data["ID"] = _id
streams.append(data)
return True
async def stream_checker(self):
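        # Delay, in seconds, between two consecutive passes over every
        # registered stream.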
CHECK_DELAY = 60
try:
await self._migration_twitch_v5()
except InvalidCredentials:
print("Error during convertion of twitch usernames to IDs: "
"invalid token")
except Exception as e:
print("Error during convertion of twitch usernames to IDs: "
"{}".format(e))
while self == self.bot.get_cog("Streams"):
save = False
streams = ((self.twitch_streams, self.twitch_online),
(self.hitbox_streams, self.hitbox_online),
(self.mixer_streams, self.mixer_online),
(self.picarto_streams, self.picarto_online))
for streams_list, parser in streams:
if parser == self.twitch_online:
_type = "ID"
else:
_type = "NAME"
for stream in streams_list:
if _type not in stream:
continue
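                    # This key ties the notifications sent below to the
                    # stream so they can be cleaned up when it goes offline.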
key = (parser, stream[_type])
try:
embed = await parser(stream[_type])
except OfflineStream:
if stream["ALREADY_ONLINE"]:
stream["ALREADY_ONLINE"] = False
save = True
await self.delete_old_notifications(key)
except: # We don't want our task to die
continue
else:
if stream["ALREADY_ONLINE"]:
continue
save = True
stream["ALREADY_ONLINE"] = True
messages_sent = []
for channel_id in stream["CHANNELS"]:
channel = self.bot.get_channel(channel_id)
if channel is None:
continue
mention = self.settings.get(channel.server.id, {}).get("MENTION", "")
can_speak = channel.permissions_for(channel.server.me).send_messages
message = mention + " {} is live!".format(stream["NAME"])
if channel and can_speak:
m = await self.bot.send_message(channel, message, embed=embed)
messages_sent.append(m)
self.messages_cache[key] = messages_sent
await asyncio.sleep(0.5)
if save:
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
dataIO.save_json("data/streams/hitbox.json", self.hitbox_streams)
dataIO.save_json("data/streams/beam.json", self.mixer_streams)
dataIO.save_json("data/streams/picarto.json", self.picarto_streams)
await asyncio.sleep(CHECK_DELAY)
async def delete_old_notifications(self, key):
for message in self.messages_cache[key]:
server = message.server
settings = self.settings.get(server.id, {})
is_enabled = settings.get("AUTODELETE", True)
try:
if is_enabled:
await self.bot.delete_message(message)
except:
pass
del self.messages_cache[key]
def rnd_attr(self):
"""Avoids Discord's caching"""
return "?rnd=" + "".join([choice(ascii_letters) for i in range(6)])
async def _migration_twitch_v5(self):
# Migration of old twitch streams to API v5
to_convert = []
for stream in self.twitch_streams:
if "ID" not in stream:
to_convert.append(stream["NAME"])
if not to_convert:
return
results = await self.fetch_twitch_ids(*to_convert)
for stream in self.twitch_streams:
for result in results:
if stream["NAME"].lower() == result["name"].lower():
stream["ID"] = result["_id"]
# We might as well delete the invalid / renamed ones
self.twitch_streams = [s for s in self.twitch_streams if "ID" in s]
dataIO.save_json("data/streams/twitch.json", self.twitch_streams)
def check_folders():
if not os.path.exists("data/streams"):
print("Creating data/streams folder...")
os.makedirs("data/streams")
def check_files():
stream_files = (
"twitch.json",
"hitbox.json",
"beam.json",
"picarto.json"
)
for filename in stream_files:
if not dataIO.is_valid_json("data/streams/" + filename):
print("Creating empty {}...".format(filename))
dataIO.save_json("data/streams/" + filename, [])
f = "data/streams/settings.json"
if not dataIO.is_valid_json(f):
print("Creating empty settings.json...")
dataIO.save_json(f, {})
def setup(bot):
logger = logging.getLogger('aiohttp.client')
logger.setLevel(50) # Stops warning spam
check_folders()
check_files()
n = Streams(bot)
loop = asyncio.get_event_loop()
loop.create_task(n.stream_checker())
bot.add_cog(n)
| gpl-3.0 | -1,223,884,016,318,872,800 | -1,545,143,340,939,825,200 | 36.668097 | 97 | 0.548614 | false |
solo2101/Tilo-Menu | src/lib/tilo/backend.py | 1 | 3706 | #!/usr/bin/env python
# This application is released under the GNU General Public License
# v3 (or, at your option, any later version). You can find the full
# text of the license under http://www.gnu.org/licenses/gpl.txt.
# By using, editing and/or distributing this software you agree to
# the terms and conditions of this license.
# Thank you for using free software!
#
#(c) Whise 2009 <helderfraga@gmail.com>
#
# backend for saving and loading settings
# Part of the Tilo
import os
try:
from gi.repository import GConf
gconf_client = GConf.Client.get_default()
BACKEND = 'gconf'
print "gconf backend"
except:
BACKEND = 'xml'
import xml.dom.minidom
print "xml backend"
HomeDirectory = os.path.expanduser("~")
ConfigDirectory = HomeDirectory + '/.tilo'
gconf_app_key = '/apps/tilo'
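# Persist a single setting: to GConf under /apps/tilo when available,
# otherwise as an attribute of the <settings> node in ~/.tilo/.Tilo-Settings.xml.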
def save_setting(name,value):
if BACKEND == 'gconf':
if isinstance(value, int) or isinstance(value, float):
gconf_client.set_int(gconf_app_key + '/' + name , int(value))
elif isinstance(value, str):
gconf_client.set_string(gconf_app_key + '/' + name , str(value))
elif isinstance(value, list):
gconf_client.set_list(gconf_app_key + '/' + name ,1, value)
elif BACKEND == 'xml':
if name == '': return
if os.path.isfile(ConfigDirectory + "/.Tilo-Settings.xml"):
XMLSettings = xml.dom.minidom.parse(ConfigDirectory + "/.Tilo-Settings.xml")
XBase = XMLSettings.getElementsByTagName('Tilo')[0]
else:
XMLSettings = xml.dom.minidom.Document()
XBase = XMLSettings.createElement('Tilo')
try:
node = XMLSettings.getElementsByTagName('settings')[0]
except:
node = XMLSettings.createElement('settings')
node.setAttribute(name, str(value))
XBase.appendChild(node)
XMLSettings.appendChild(XBase)
file = open(ConfigDirectory + "/.Tilo-Settings.xml","w")
XMLSettings.writexml(file, " ", "", "", "UTF-8")
XMLSettings.unlink()
else:
pass
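# Read a setting back from whichever backend is active. Returns None when the
# setting is missing, except 'favorites' which falls back to an empty list.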
def load_setting(name):
if BACKEND == 'gconf':
try:
typ = gconf_client.get_without_default(gconf_app_key + "/" + name).type
if typ == 1:
return gconf_client.get_string(gconf_app_key + "/" + name)
elif typ == 2:
return gconf_client.get_int(gconf_app_key + "/" + name)
elif typ == 6:
return gconf_client.get_list(gconf_app_key + "/" + name,1)
else:
if name == 'favorites': return []
return None
except :
if name == 'favorites': return []
return None
elif BACKEND == 'xml':
if os.path.isfile(ConfigDirectory + "/.Tilo-Settings.xml"):
XMLSettings = xml.dom.minidom.parse(ConfigDirectory + "/.Tilo-Settings.xml")
#print XMLSettings.getElementsByTagName('Tilo')[0].childNodes[0].localName
x = XMLSettings.getElementsByTagName('Tilo')[0].getElementsByTagName("settings")[0]
try:
x = x.attributes[name].value
try:
a = int(x)
except:
if str(x).find('[]') != -1 and name == 'favorites': return []
if str(x).find(':') != -1:
x = str(x).replace(" u'","").replace("u'","").replace("[","").replace("]","").replace("'","").replace('"','"')
a = x.split(',')
print a
else:
a = str(x)
return a
except:
if name == 'favorites': return []
return None
else:
return None
else:
pass
def get_default_mail_client():
if BACKEND == 'gconf':
return gconf_client.get_string("/desktop/mate/url-handlers/mailto/command")
elif BACKEND == 'xml':
return "xdg-open mailto:"
else:
pass
def get_default_internet_browser():
if BACKEND == 'gconf':
return gconf_client.get_string("/desktop/mate/url-handlers/http/command")#"/desktop/mate/applications/browser/exec")
elif BACKEND == 'xml':
return "xdg-open http:"
else:
pass
| gpl-2.0 | -5,045,919,327,658,163,000 | 7,891,407,107,451,278,000 | 24.040541 | 121 | 0.648948 | false |
gangadhar-kadam/mic-wnframework | webnotes/widgets/form/assign_to.py | 6 | 5492 | # Copyright (c) 2012 Web Notes Technologies Pvt Ltd (http://erpnext.com)
#
# MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import unicode_literals
"""assign/unassign to ToDo"""
import webnotes
@webnotes.whitelist()
def get(args=None):
"""get assigned to"""
if not args:
args = webnotes.form_dict
return webnotes.conn.sql("""select owner from `tabToDo`
where reference_type=%(doctype)s and reference_name=%(name)s
order by modified desc limit 5""", args, as_dict=1)
@webnotes.whitelist()
def add(args=None):
"""add in someone's to do list"""
if not args:
args = webnotes.form_dict
if webnotes.conn.sql("""select owner from `tabToDo`
where reference_type=%(doctype)s and reference_name=%(name)s
and owner=%(assign_to)s""", args):
webnotes.msgprint("Already in todo")
return
else:
from webnotes.model.doc import Document
from webnotes.utils import nowdate
d = Document("ToDo")
d.owner = args['assign_to']
d.reference_type = args['doctype']
d.reference_name = args['name']
d.description = args['description']
d.priority = args.get('priority', 'Medium')
d.date = args.get('date', nowdate())
d.assigned_by = args.get('assigned_by', webnotes.user.name)
d.save(1)
# set assigned_to if field exists
from webnotes.model.meta import has_field
if has_field(args['doctype'], "assigned_to"):
webnotes.conn.set_value(args['doctype'], args['name'], "assigned_to", args['assign_to'])
# notify
if not args.get("no_notification"):
notify_assignment(d.assigned_by, d.owner, d.reference_type, d.reference_name, action='ASSIGN', description=args.get("description"), notify=args.get('notify'))
	# update feed
try:
import home
from webnotes.utils import get_fullname
home.make_feed('Assignment', d.reference_type, d.reference_name, webnotes.session['user'],
'[%s] Assigned to %s' % (d.priority, get_fullname(d.owner)), '#C78F58')
except ImportError, e:
pass
return get(args)
@webnotes.whitelist()
def remove(doctype, name, assign_to):
"""remove from todo"""
res = webnotes.conn.sql("""\
select assigned_by, owner, reference_type, reference_name from `tabToDo`
where reference_type=%(doctype)s and reference_name=%(name)s
and owner=%(assign_to)s""", locals())
webnotes.conn.sql("""delete from `tabToDo`
where reference_type=%(doctype)s and reference_name=%(name)s
and owner=%(assign_to)s""", locals())
# clear assigned_to if field exists
from webnotes.model.meta import has_field
if has_field(doctype, "assigned_to"):
webnotes.conn.set_value(doctype, name, "assigned_to", None)
if res and res[0]: notify_assignment(res[0][0], res[0][1], res[0][2], res[0][3])
return get({"doctype": doctype, "name": name})
def clear(doctype, name):
for assign_to in webnotes.conn.sql_list("""select owner from `tabToDo`
where reference_type=%(doctype)s and reference_name=%(name)s""", locals()):
remove(doctype, name, assign_to)
def notify_assignment(assigned_by, owner, doc_type, doc_name, action='CLOSE',
description=None, notify=0):
"""
Notify assignee that there is a change in assignment
"""
if not (assigned_by and owner and doc_type and doc_name): return
# self assignment / closing - no message
if assigned_by==owner:
return
from webnotes.boot import get_fullnames
user_info = get_fullnames()
# Search for email address in description -- i.e. assignee
from webnotes.utils import get_url_to_form
assignment = get_url_to_form(doc_type, doc_name, label="%s: %s" % (doc_type, doc_name))
if action=='CLOSE':
if owner == webnotes.session.get('user'):
arg = {
'contact': assigned_by,
'txt': "The task %s, that you assigned to %s, has been \
closed." % (assignment,
user_info.get(owner, {}).get('fullname'))
}
else:
arg = {
'contact': assigned_by,
'txt': "The task %s, that you assigned to %s, \
has been closed by %s." % (assignment,
user_info.get(owner, {}).get('fullname'),
user_info.get(webnotes.session.get('user'),
{}).get('fullname'))
}
else:
arg = {
'contact': owner,
'txt': "A new task, %s, has been assigned to you by %s. %s" \
% (assignment,
user_info.get(webnotes.session.get('user'), {}).get('fullname'),
description and ("<p>Description: " + description + "</p>") or ""),
'notify': notify
}
arg["parenttype"] = "Assignment"
from core.page.messages import messages
import json
messages.post(json.dumps(arg))
| mit | 4,114,191,776,040,657,000 | -2,076,331,462,404,130,000 | 33.980892 | 160 | 0.691916 | false |
chromium/chromium | third_party/pylint/pylint/checkers/similar.py | 64 | 14174 | # pylint: disable=W0622
# Copyright (c) 2004-2013 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""a similarities / code duplication command line tool and pylint checker
"""
from __future__ import print_function
import sys
from collections import defaultdict
from logilab.common.ureports import Table
from pylint.interfaces import IRawChecker
from pylint.checkers import BaseChecker, table_lines_from_stats
import six
from six.moves import zip
class Similar(object):
"""finds copy-pasted lines of code in a project"""
def __init__(self, min_lines=4, ignore_comments=False,
ignore_docstrings=False, ignore_imports=False):
self.min_lines = min_lines
self.ignore_comments = ignore_comments
self.ignore_docstrings = ignore_docstrings
self.ignore_imports = ignore_imports
self.linesets = []
def append_stream(self, streamid, stream, encoding=None):
"""append a file to search for similarities"""
if encoding is None:
readlines = stream.readlines
else:
readlines = lambda: [line.decode(encoding) for line in stream]
try:
self.linesets.append(LineSet(streamid,
readlines(),
self.ignore_comments,
self.ignore_docstrings,
self.ignore_imports))
except UnicodeDecodeError:
pass
def run(self):
"""start looking for similarities and display results on stdout"""
self._display_sims(self._compute_sims())
def _compute_sims(self):
"""compute similarities in appended files"""
no_duplicates = defaultdict(list)
for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
duplicate = no_duplicates[num]
for couples in duplicate:
if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
couples.add((lineset1, idx1))
couples.add((lineset2, idx2))
break
else:
duplicate.append(set([(lineset1, idx1), (lineset2, idx2)]))
sims = []
for num, ensembles in six.iteritems(no_duplicates):
for couples in ensembles:
sims.append((num, couples))
sims.sort()
sims.reverse()
return sims
def _display_sims(self, sims):
"""display computed similarities on stdout"""
nb_lignes_dupliquees = 0
for num, couples in sims:
print()
print(num, "similar lines in", len(couples), "files")
couples = sorted(couples)
for lineset, idx in couples:
print("==%s:%s" % (lineset.name, idx))
# pylint: disable=W0631
for line in lineset._real_lines[idx:idx+num]:
print(" ", line.rstrip())
nb_lignes_dupliquees += num * (len(couples)-1)
nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
print("TOTAL lines=%s duplicates=%s percent=%.2f" \
% (nb_total_lignes, nb_lignes_dupliquees,
nb_lignes_dupliquees*100. / nb_total_lignes))
def _find_common(self, lineset1, lineset2):
"""find similarities in the two given linesets"""
lines1 = lineset1.enumerate_stripped
lines2 = lineset2.enumerate_stripped
find = lineset2.find
index1 = 0
min_lines = self.min_lines
while index1 < len(lineset1):
skip = 1
num = 0
for index2 in find(lineset1[index1]):
non_blank = 0
for num, ((_, line1), (_, line2)) in enumerate(
zip(lines1(index1), lines2(index2))):
if line1 != line2:
if non_blank > min_lines:
yield num, lineset1, index1, lineset2, index2
skip = max(skip, num)
break
if line1:
non_blank += 1
else:
# we may have reach the end
num += 1
if non_blank > min_lines:
yield num, lineset1, index1, lineset2, index2
skip = max(skip, num)
index1 += skip
def _iter_sims(self):
"""iterate on similarities among all files, by making a cartesian
product
"""
for idx, lineset in enumerate(self.linesets[:-1]):
for lineset2 in self.linesets[idx+1:]:
for sim in self._find_common(lineset, lineset2):
yield sim
def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
"""return lines with leading/trailing whitespace and any ignored code
features removed
"""
strippedlines = []
docstring = None
for line in lines:
line = line.strip()
if ignore_docstrings:
if not docstring and \
(line.startswith('"""') or line.startswith("'''")):
docstring = line[:3]
line = line[3:]
if docstring:
if line.endswith(docstring):
docstring = None
line = ''
if ignore_imports:
if line.startswith("import ") or line.startswith("from "):
line = ''
if ignore_comments:
# XXX should use regex in checkers/format to avoid cutting
# at a "#" in a string
line = line.split('#', 1)[0].strip()
strippedlines.append(line)
return strippedlines
class LineSet(object):
"""Holds and indexes all the lines of a single source file"""
def __init__(self, name, lines, ignore_comments=False,
ignore_docstrings=False, ignore_imports=False):
self.name = name
self._real_lines = lines
self._stripped_lines = stripped_lines(lines, ignore_comments,
ignore_docstrings,
ignore_imports)
self._index = self._mk_index()
def __str__(self):
return '<Lineset for %s>' % self.name
def __len__(self):
return len(self._real_lines)
def __getitem__(self, index):
return self._stripped_lines[index]
def __lt__(self, other):
return self.name < other.name
def __hash__(self):
return id(self)
def enumerate_stripped(self, start_at=0):
"""return an iterator on stripped lines, starting from a given index
if specified, else 0
"""
idx = start_at
if start_at:
lines = self._stripped_lines[start_at:]
else:
lines = self._stripped_lines
for line in lines:
#if line:
yield idx, line
idx += 1
def find(self, stripped_line):
"""return positions of the given stripped line in this set"""
return self._index.get(stripped_line, ())
def _mk_index(self):
"""create the index for this set"""
index = defaultdict(list)
for line_no, line in enumerate(self._stripped_lines):
if line:
index[line].append(line_no)
return index
MSGS = {'R0801': ('Similar lines in %s files\n%s',
'duplicate-code',
'Indicates that a set of similar lines has been detected \
among multiple file. This usually means that the code should \
be refactored to avoid this duplication.')}
def report_similarities(sect, stats, old_stats):
"""make a layout with some stats about duplication"""
lines = ['', 'now', 'previous', 'difference']
lines += table_lines_from_stats(stats, old_stats,
('nb_duplicated_lines',
'percent_duplicated_lines'))
sect.append(Table(children=lines, cols=4, rheaders=1, cheaders=1))
# wrapper to get a pylint checker from the similar class
class SimilarChecker(BaseChecker, Similar):
"""checks for similarities and duplicated code. This computation may be
    memory / CPU intensive, so you should disable it if you experience
    problems.
"""
__implements__ = (IRawChecker,)
# configuration section name
name = 'similarities'
# messages
msgs = MSGS
# configuration options
# for available dict keys/values see the optik parser 'add_option' method
options = (('min-similarity-lines',
{'default' : 4, 'type' : "int", 'metavar' : '<int>',
'help' : 'Minimum lines number of a similarity.'}),
('ignore-comments',
{'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
'help': 'Ignore comments when computing similarities.'}
),
('ignore-docstrings',
{'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
'help': 'Ignore docstrings when computing similarities.'}
),
('ignore-imports',
{'default' : False, 'type' : 'yn', 'metavar' : '<y or n>',
'help': 'Ignore imports when computing similarities.'}
),
)
# reports
reports = (('RP0801', 'Duplication', report_similarities),)
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
Similar.__init__(self, min_lines=4,
ignore_comments=True, ignore_docstrings=True)
self.stats = None
def set_option(self, optname, value, action=None, optdict=None):
"""method called to set an option (registered in the options list)
overridden to report options setting to Similar
"""
BaseChecker.set_option(self, optname, value, action, optdict)
if optname == 'min-similarity-lines':
self.min_lines = self.config.min_similarity_lines
elif optname == 'ignore-comments':
self.ignore_comments = self.config.ignore_comments
elif optname == 'ignore-docstrings':
self.ignore_docstrings = self.config.ignore_docstrings
elif optname == 'ignore-imports':
self.ignore_imports = self.config.ignore_imports
def open(self):
"""init the checkers: reset linesets and statistics information"""
self.linesets = []
self.stats = self.linter.add_stats(nb_duplicated_lines=0,
percent_duplicated_lines=0)
def process_module(self, node):
"""process a module
the module's content is accessible via the stream object
stream must implement the readlines method
"""
with node.stream() as stream:
self.append_stream(self.linter.current_name,
stream,
node.file_encoding)
def close(self):
"""compute and display similarities on closing (i.e. end of parsing)"""
total = sum([len(lineset) for lineset in self.linesets])
duplicated = 0
stats = self.stats
for num, couples in self._compute_sims():
msg = []
for lineset, idx in couples:
msg.append("==%s:%s" % (lineset.name, idx))
msg.sort()
# pylint: disable=W0631
for line in lineset._real_lines[idx:idx+num]:
msg.append(line.rstrip())
self.add_message('R0801', args=(len(couples), '\n'.join(msg)))
duplicated += num * (len(couples) - 1)
stats['nb_duplicated_lines'] = duplicated
stats['percent_duplicated_lines'] = total and duplicated * 100. / total
def register(linter):
"""required method to auto register this checker """
linter.register_checker(SimilarChecker(linter))
def usage(status=0):
"""display command line usage information"""
print("finds copy pasted blocks in a set of files")
print()
print('Usage: symilar [-d|--duplicates min_duplicated_lines] \
[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...')
sys.exit(status)
def Run(argv=None):
"""standalone command line access point"""
if argv is None:
argv = sys.argv[1:]
from getopt import getopt
s_opts = 'hdi'
l_opts = ('help', 'duplicates=', 'ignore-comments', 'ignore-imports',
'ignore-docstrings')
min_lines = 4
ignore_comments = False
ignore_docstrings = False
ignore_imports = False
opts, args = getopt(argv, s_opts, l_opts)
for opt, val in opts:
if opt in ('-d', '--duplicates'):
min_lines = int(val)
elif opt in ('-h', '--help'):
usage()
elif opt in ('-i', '--ignore-comments'):
ignore_comments = True
elif opt in ('--ignore-docstrings',):
ignore_docstrings = True
elif opt in ('--ignore-imports',):
ignore_imports = True
if not args:
usage(1)
sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
for filename in args:
with open(filename) as stream:
sim.append_stream(filename, stream)
sim.run()
sys.exit(0)
if __name__ == '__main__':
Run()
| bsd-3-clause | -3,791,986,653,851,028,000 | -6,467,569,512,730,732,000 | 37.102151 | 80 | 0.564484 | false |
ff94315/hiwifi-openwrt-HC5661-HC5761 | staging_dir/target-mipsel_r2_uClibc-0.9.33.2/root-ralink/usr/lib/python2.7/tokenize.py | 122 | 16465 | """Tokenization help for Python programs.
generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens. It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF). It generates
5-tuples with these members:
the token type (see token.py)
the token (a string)
the starting (row, column) indices of the token (a 2-tuple of ints)
the ending (row, column) indices of the token (a 2-tuple of ints)
the original line (string)
It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators
Older entry points
tokenize_loop(readline, tokeneater)
tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""
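# A minimal usage sketch (illustrative only; the sample source string and the
# StringIO-based readline are assumptions, not part of this module):
#
#     from StringIO import StringIO
#     readline = StringIO("x = 1 + 2\n").readline
#     for tok_type, tok_str, start, end, line in generate_tokens(readline):
#         print tok_name[tok_type], repr(tok_str)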
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
'Skip Montanaro, Raymond Hettinger')
import string, re
from token import *
import token
__all__ = [x for x in dir(token) if not x.startswith("_")]
__all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
del x
del token
COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
tok_name[NL] = 'NL'
N_TOKENS += 2
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'
Hexnumber = r'0[xX][\da-fA-F]+[lL]?'
Octnumber = r'(0[oO][0-7]+)|(0[0-7]*)[lL]?'
Binnumber = r'0[bB][01]+[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)
# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
r"//=?",
r"[+\-*/%&|^=<>]=?",
r"~")
Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
# First (or only) line of ' or " string.
ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
group("'", r'\\\r?\n'),
r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
tokenprog, pseudoprog, single3prog, double3prog = map(
re.compile, (Token, PseudoToken, Single3, Double3))
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
"'''": single3prog, '"""': double3prog,
"r'''": single3prog, 'r"""': double3prog,
"u'''": single3prog, 'u"""': double3prog,
"ur'''": single3prog, 'ur"""': double3prog,
"R'''": single3prog, 'R"""': double3prog,
"U'''": single3prog, 'U"""': double3prog,
"uR'''": single3prog, 'uR"""': double3prog,
"Ur'''": single3prog, 'Ur"""': double3prog,
"UR'''": single3prog, 'UR"""': double3prog,
"b'''": single3prog, 'b"""': double3prog,
"br'''": single3prog, 'br"""': double3prog,
"B'''": single3prog, 'B"""': double3prog,
"bR'''": single3prog, 'bR"""': double3prog,
"Br'''": single3prog, 'Br"""': double3prog,
"BR'''": single3prog, 'BR"""': double3prog,
'r': None, 'R': None, 'u': None, 'U': None,
'b': None, 'B': None}
triple_quoted = {}
for t in ("'''", '"""',
"r'''", 'r"""', "R'''", 'R"""',
"u'''", 'u"""', "U'''", 'U"""',
"ur'''", 'ur"""', "Ur'''", 'Ur"""',
"uR'''", 'uR"""', "UR'''", 'UR"""',
"b'''", 'b"""', "B'''", 'B"""',
"br'''", 'br"""', "Br'''", 'Br"""',
"bR'''", 'bR"""', "BR'''", 'BR"""'):
triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
"r'", 'r"', "R'", 'R"',
"u'", 'u"', "U'", 'U"',
"ur'", 'ur"', "Ur'", 'Ur"',
"uR'", 'uR"', "UR'", 'UR"',
"b'", 'b"', "B'", 'B"',
"br'", 'br"', "Br'", 'Br"',
"bR'", 'bR"', "BR'", 'BR"' ):
single_quoted[t] = t
tabsize = 8
class TokenError(Exception): pass
class StopTokenizing(Exception): pass
def printtoken(type, token, srow_scol, erow_ecol, line): # for testing
srow, scol = srow_scol
erow, ecol = erow_ecol
print "%d,%d-%d,%d:\t%s\t%s" % \
(srow, scol, erow, ecol, tok_name[type], repr(token))
def tokenize(readline, tokeneater=printtoken):
"""
The tokenize() function accepts two parameters: one representing the
input stream, and one providing an output mechanism for tokenize().
The first parameter, readline, must be a callable object which provides
the same interface as the readline() method of built-in file objects.
Each call to the function should return one line of input as a string.
The second parameter, tokeneater, must also be a callable object. It is
called once for each token, with five arguments, corresponding to the
tuples generated by generate_tokens().
"""
try:
tokenize_loop(readline, tokeneater)
except StopTokenizing:
pass
# backwards compatible interface
def tokenize_loop(readline, tokeneater):
for token_info in generate_tokens(readline):
tokeneater(*token_info)
class Untokenizer:
def __init__(self):
self.tokens = []
self.prev_row = 1
self.prev_col = 0
def add_whitespace(self, start):
row, col = start
assert row <= self.prev_row
col_offset = col - self.prev_col
if col_offset:
self.tokens.append(" " * col_offset)
def untokenize(self, iterable):
for t in iterable:
if len(t) == 2:
self.compat(t, iterable)
break
tok_type, token, start, end, line = t
self.add_whitespace(start)
self.tokens.append(token)
self.prev_row, self.prev_col = end
if tok_type in (NEWLINE, NL):
self.prev_row += 1
self.prev_col = 0
return "".join(self.tokens)
def compat(self, token, iterable):
startline = False
indents = []
toks_append = self.tokens.append
toknum, tokval = token
if toknum in (NAME, NUMBER):
tokval += ' '
if toknum in (NEWLINE, NL):
startline = True
prevstring = False
for tok in iterable:
toknum, tokval = tok[:2]
if toknum in (NAME, NUMBER):
tokval += ' '
# Insert a space between two consecutive strings
if toknum == STRING:
if prevstring:
tokval = ' ' + tokval
prevstring = True
else:
prevstring = False
if toknum == INDENT:
indents.append(tokval)
continue
elif toknum == DEDENT:
indents.pop()
continue
elif toknum in (NEWLINE, NL):
startline = True
elif startline and indents:
toks_append(indents[-1])
startline = False
toks_append(tokval)
def untokenize(iterable):
"""Transform tokens back into Python source code.
Each element returned by the iterable must be a token sequence
with at least two elements, a token number and token value. If
only two tokens are passed, the resulting output is poor.
Round-trip invariant for full input:
Untokenized source will match input source exactly
    Round-trip invariant for limited input:
        # Output text will tokenize back to the input
t1 = [tok[:2] for tok in generate_tokens(f.readline)]
newcode = untokenize(t1)
readline = iter(newcode.splitlines(1)).next
t2 = [tok[:2] for tok in generate_tokens(readline)]
assert t1 == t2
"""
ut = Untokenizer()
return ut.untokenize(iterable)
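# Hedged round-trip sketch (the sample source text is an assumption): feeding
# the full 5-tuples produced by generate_tokens() back into untokenize()
# reproduces the original source exactly.
#
#     from StringIO import StringIO
#     source = "x = 1 + 2\n"
#     toks = list(generate_tokens(StringIO(source).readline))
#     assert untokenize(toks) == source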
def generate_tokens(readline):
"""
    The generate_tokens() generator requires one argument, readline, which
must be a callable object which provides the same interface as the
readline() method of built-in file objects. Each call to the function
should return one line of input as a string. Alternately, readline
can be a callable function terminating with StopIteration:
readline = open(myfile).next # Example of alternate readline
The generator produces 5-tuples with these members: the token type; the
token string; a 2-tuple (srow, scol) of ints specifying the row and
column where the token begins in the source; a 2-tuple (erow, ecol) of
ints specifying the row and column where the token ends in the source;
and the line on which the token was found. The line passed is the
logical line; continuation lines are included.
"""
lnum = parenlev = continued = 0
namechars, numchars = string.ascii_letters + '_', '0123456789'
contstr, needcont = '', 0
contline = None
indents = [0]
while 1: # loop over lines in stream
try:
line = readline()
except StopIteration:
line = ''
lnum += 1
pos, max = 0, len(line)
if contstr: # continued string
if not line:
raise TokenError, ("EOF in multi-line string", strstart)
endmatch = endprog.match(line)
if endmatch:
pos = end = endmatch.end(0)
yield (STRING, contstr + line[:end],
strstart, (lnum, end), contline + line)
contstr, needcont = '', 0
contline = None
elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
yield (ERRORTOKEN, contstr + line,
strstart, (lnum, len(line)), contline)
contstr = ''
contline = None
continue
else:
contstr = contstr + line
contline = contline + line
continue
elif parenlev == 0 and not continued: # new statement
if not line: break
column = 0
while pos < max: # measure leading whitespace
if line[pos] == ' ':
column += 1
elif line[pos] == '\t':
column = (column//tabsize + 1)*tabsize
elif line[pos] == '\f':
column = 0
else:
break
pos += 1
if pos == max:
break
if line[pos] in '#\r\n': # skip comments or blank lines
if line[pos] == '#':
comment_token = line[pos:].rstrip('\r\n')
nl_pos = pos + len(comment_token)
yield (COMMENT, comment_token,
(lnum, pos), (lnum, pos + len(comment_token)), line)
yield (NL, line[nl_pos:],
(lnum, nl_pos), (lnum, len(line)), line)
else:
yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
(lnum, pos), (lnum, len(line)), line)
continue
if column > indents[-1]: # count indents or dedents
indents.append(column)
yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
while column < indents[-1]:
if column not in indents:
raise IndentationError(
"unindent does not match any outer indentation level",
("<tokenize>", lnum, pos, line))
indents = indents[:-1]
yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
else: # continued statement
if not line:
raise TokenError, ("EOF in multi-line statement", (lnum, 0))
continued = 0
while pos < max:
pseudomatch = pseudoprog.match(line, pos)
if pseudomatch: # scan for tokens
start, end = pseudomatch.span(1)
spos, epos, pos = (lnum, start), (lnum, end), end
token, initial = line[start:end], line[start]
if initial in numchars or \
(initial == '.' and token != '.'): # ordinary number
yield (NUMBER, token, spos, epos, line)
elif initial in '\r\n':
yield (NL if parenlev > 0 else NEWLINE,
token, spos, epos, line)
elif initial == '#':
assert not token.endswith("\n")
yield (COMMENT, token, spos, epos, line)
elif token in triple_quoted:
endprog = endprogs[token]
endmatch = endprog.match(line, pos)
if endmatch: # all on one line
pos = endmatch.end(0)
token = line[start:pos]
yield (STRING, token, spos, (lnum, pos), line)
else:
strstart = (lnum, start) # multiple lines
contstr = line[start:]
contline = line
break
elif initial in single_quoted or \
token[:2] in single_quoted or \
token[:3] in single_quoted:
if token[-1] == '\n': # continued string
strstart = (lnum, start)
endprog = (endprogs[initial] or endprogs[token[1]] or
endprogs[token[2]])
contstr, needcont = line[start:], 1
contline = line
break
else: # ordinary string
yield (STRING, token, spos, epos, line)
elif initial in namechars: # ordinary name
yield (NAME, token, spos, epos, line)
elif initial == '\\': # continued stmt
continued = 1
else:
if initial in '([{':
parenlev += 1
elif initial in ')]}':
parenlev -= 1
yield (OP, token, spos, epos, line)
else:
yield (ERRORTOKEN, line[pos],
(lnum, pos), (lnum, pos+1), line)
pos += 1
for indent in indents[1:]: # pop remaining indent levels
yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
if __name__ == '__main__': # testing
import sys
if len(sys.argv) > 1:
tokenize(open(sys.argv[1]).readline)
else:
tokenize(sys.stdin.readline)
| gpl-2.0 | 5,658,436,123,338,514,000 | -305,933,455,987,486,140 | 37.832547 | 79 | 0.501245 | false |
pando85/gourmet | gourmet/plugins/field_editor/__init__.py | 7 | 1175 | from gourmet.plugin import ToolPlugin
import fieldEditor
import gtk
from gettext import gettext as _
class FieldEditorPlugin (ToolPlugin):
menu_items = '''<placeholder name="DataTool">
<menuitem action="FieldEditor"/>
</placeholder>
'''
def setup_action_groups (self):
self.action_group = gtk.ActionGroup('FieldEditorPluginActionGroup')
self.action_group.add_actions([
('FieldEditor',None,_('Field Editor'),
None,_('Edit fields across multiple recipes at a time.'),self.show_field_editor
),
])
self.action_groups.append(self.action_group)
def show_field_editor (self, *args):
from gourmet.GourmetRecipeManager import get_application
self.app = get_application()
self.field_editor = fieldEditor.FieldEditor(self.app.rd, self.app)
self.field_editor.valueDialog.connect('response',self.response_cb)
self.field_editor.show()
def response_cb (self, d, r):
if r==gtk.RESPONSE_APPLY:
self.app.update_attribute_models()
plugins = [FieldEditorPlugin]
| gpl-2.0 | -641,102,965,054,444,200 | 5,458,926,048,630,587,000 | 31.638889 | 92 | 0.621277 | false |
christian-posta/openshift-ansible | filter_plugins/oo_filters.py | 14 | 14087 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: expandtab:tabstop=4:shiftwidth=4
'''
Custom filters for use in openshift-ansible
'''
from ansible import errors
from operator import itemgetter
import pdb
import re
import json
class FilterModule(object):
''' Custom ansible filters '''
@staticmethod
def oo_pdb(arg):
''' This pops you into a pdb instance where arg is the data passed in
from the filter.
Ex: "{{ hostvars | oo_pdb }}"
'''
pdb.set_trace()
return arg
@staticmethod
def get_attr(data, attribute=None):
''' This looks up dictionary attributes of the form a.b.c and returns
the value.
Ex: data = {'a': {'b': {'c': 5}}}
attribute = "a.b.c"
returns 5
'''
if not attribute:
raise errors.AnsibleFilterError("|failed expects attribute to be set")
ptr = data
for attr in attribute.split('.'):
ptr = ptr[attr]
return ptr
@staticmethod
def oo_flatten(data):
''' This filter plugin will flatten a list of lists
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects to flatten a List")
return [item for sublist in data for item in sublist]
@staticmethod
def oo_collect(data, attribute=None, filters=None):
''' This takes a list of dict and collects all attributes specified into a
list. If filter is specified then we will include all items that
match _ALL_ of filters. If a dict entry is missing the key in a
filter it will be excluded from the match.
Ex: data = [ {'a':1, 'b':5, 'z': 'z'}, # True, return
{'a':2, 'z': 'z'}, # True, return
{'a':3, 'z': 'z'}, # True, return
{'a':4, 'z': 'b'}, # FAILED, obj['z'] != obj['z']
]
attribute = 'a'
filters = {'z': 'z'}
returns [1, 2, 3]
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects to filter on a List")
if not attribute:
raise errors.AnsibleFilterError("|failed expects attribute to be set")
if filters is not None:
if not issubclass(type(filters), dict):
raise errors.AnsibleFilterError("|failed expects filter to be a"
" dict")
retval = [FilterModule.get_attr(d, attribute) for d in data if (
all([d.get(key, None) == filters[key] for key in filters]))]
else:
retval = [FilterModule.get_attr(d, attribute) for d in data]
return retval
@staticmethod
def oo_select_keys_from_list(data, keys):
''' This returns a list, which contains the value portions for the keys
Ex: data = { 'a':1, 'b':2, 'c':3 }
keys = ['a', 'c']
returns [1, 3]
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects to filter on a list")
if not issubclass(type(keys), list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
# Gather up the values for the list of keys passed in
retval = [FilterModule.oo_select_keys(item, keys) for item in data]
return FilterModule.oo_flatten(retval)
@staticmethod
def oo_select_keys(data, keys):
''' This returns a list, which contains the value portions for the keys
Ex: data = { 'a':1, 'b':2, 'c':3 }
keys = ['a', 'c']
returns [1, 3]
'''
if not issubclass(type(data), dict):
raise errors.AnsibleFilterError("|failed expects to filter on a dict")
if not issubclass(type(keys), list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
# Gather up the values for the list of keys passed in
retval = [data[key] for key in keys if data.has_key(key)]
return retval
@staticmethod
def oo_prepend_strings_in_list(data, prepend):
''' This takes a list of strings and prepends a string to each item in the
list
Ex: data = ['cart', 'tree']
prepend = 'apple-'
returns ['apple-cart', 'apple-tree']
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
if not all(isinstance(x, basestring) for x in data):
raise errors.AnsibleFilterError("|failed expects first param is a list"
" of strings")
retval = [prepend + s for s in data]
return retval
@staticmethod
def oo_combine_key_value(data, joiner='='):
'''Take a list of dict in the form of { 'key': 'value'} and
arrange them as a list of strings ['key=value']
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
rval = []
for item in data:
rval.append("%s%s%s" % (item['key'], joiner, item['value']))
return rval
@staticmethod
def oo_combine_dict(data, in_joiner='=', out_joiner=' '):
'''Take a dict in the form of { 'key': 'value', 'key': 'value' } and
arrange them as a string 'key=value key=value'
'''
if not issubclass(type(data), dict):
raise errors.AnsibleFilterError("|failed expects first param is a dict")
return out_joiner.join([in_joiner.join([k, v]) for k, v in data.items()])
@staticmethod
def oo_ami_selector(data, image_name):
''' This takes a list of amis and an image name and attempts to return
the latest ami.
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
if not data:
return None
else:
if image_name is None or not image_name.endswith('_*'):
ami = sorted(data, key=itemgetter('name'), reverse=True)[0]
return ami['ami_id']
else:
ami_info = [(ami, ami['name'].split('_')[-1]) for ami in data]
ami = sorted(ami_info, key=itemgetter(1), reverse=True)[0][0]
return ami['ami_id']
@staticmethod
def oo_ec2_volume_definition(data, host_type, docker_ephemeral=False):
''' This takes a dictionary of volume definitions and returns a valid ec2
volume definition based on the host_type and the values in the
dictionary.
The dictionary should look similar to this:
{ 'master':
{ 'root':
{ 'volume_size': 10, 'device_type': 'gp2',
'iops': 500
}
},
'node':
{ 'root':
{ 'volume_size': 10, 'device_type': 'io1',
'iops': 1000
},
'docker':
{ 'volume_size': 40, 'device_type': 'gp2',
'iops': 500, 'ephemeral': 'true'
}
}
}
'''
if not issubclass(type(data), dict):
raise errors.AnsibleFilterError("|failed expects first param is a dict")
if host_type not in ['master', 'node', 'etcd']:
raise errors.AnsibleFilterError("|failed expects etcd, master or node"
" as the host type")
root_vol = data[host_type]['root']
root_vol['device_name'] = '/dev/sda1'
root_vol['delete_on_termination'] = True
if root_vol['device_type'] != 'io1':
root_vol.pop('iops', None)
if host_type == 'node':
docker_vol = data[host_type]['docker']
docker_vol['device_name'] = '/dev/xvdb'
docker_vol['delete_on_termination'] = True
if docker_vol['device_type'] != 'io1':
docker_vol.pop('iops', None)
if docker_ephemeral:
docker_vol.pop('device_type', None)
docker_vol.pop('delete_on_termination', None)
docker_vol['ephemeral'] = 'ephemeral0'
return [root_vol, docker_vol]
elif host_type == 'etcd':
etcd_vol = data[host_type]['etcd']
etcd_vol['device_name'] = '/dev/xvdb'
etcd_vol['delete_on_termination'] = True
if etcd_vol['device_type'] != 'io1':
etcd_vol.pop('iops', None)
return [root_vol, etcd_vol]
return [root_vol]
@staticmethod
def oo_split(string, separator=','):
''' This splits the input string into a list
'''
return string.split(separator)
@staticmethod
def oo_filter_list(data, filter_attr=None):
''' This returns a list, which contains all items where filter_attr
evaluates to true
Ex: data = [ { a: 1, b: True },
{ a: 3, b: False },
{ a: 5, b: True } ]
filter_attr = 'b'
returns [ { a: 1, b: True },
{ a: 5, b: True } ]
'''
if not issubclass(type(data), list):
raise errors.AnsibleFilterError("|failed expects to filter on a list")
if not issubclass(type(filter_attr), str):
raise errors.AnsibleFilterError("|failed expects filter_attr is a str")
# Gather up the values for the list of keys passed in
return [x for x in data if x[filter_attr]]
@staticmethod
def oo_parse_heat_stack_outputs(data):
''' Formats the HEAT stack output into a usable form
The goal is to transform something like this:
+---------------+-------------------------------------------------+
| Property | Value |
+---------------+-------------------------------------------------+
| capabilities | [] | |
| creation_time | 2015-06-26T12:26:26Z | |
| description | OpenShift cluster | |
| … | … |
| outputs | [ |
| | { |
| | "output_value": "value_A" |
| | "description": "This is the value of Key_A" |
| | "output_key": "Key_A" |
| | }, |
| | { |
| | "output_value": [ |
| | "value_B1", |
| | "value_B2" |
| | ], |
| | "description": "This is the value of Key_B" |
| | "output_key": "Key_B" |
| | }, |
| | ] |
| parameters | { |
| … | … |
+---------------+-------------------------------------------------+
into something like this:
{
"Key_A": "value_A",
"Key_B": [
"value_B1",
"value_B2"
]
}
'''
# Extract the “outputs” JSON snippet from the pretty-printed array
in_outputs = False
outputs = ''
line_regex = re.compile(r'\|\s*(.*?)\s*\|\s*(.*?)\s*\|')
for line in data['stdout_lines']:
match = line_regex.match(line)
if match:
if match.group(1) == 'outputs':
in_outputs = True
elif match.group(1) != '':
in_outputs = False
if in_outputs:
outputs += match.group(2)
outputs = json.loads(outputs)
# Revamp the “outputs” to put it in the form of a “Key: value” map
revamped_outputs = {}
for output in outputs:
revamped_outputs[output['output_key']] = output['output_value']
return revamped_outputs
def filters(self):
''' returns a mapping of filters to methods '''
return {
"oo_select_keys": self.oo_select_keys,
"oo_select_keys_from_list": self.oo_select_keys_from_list,
"oo_collect": self.oo_collect,
"oo_flatten": self.oo_flatten,
"oo_pdb": self.oo_pdb,
"oo_prepend_strings_in_list": self.oo_prepend_strings_in_list,
"oo_ami_selector": self.oo_ami_selector,
"oo_ec2_volume_definition": self.oo_ec2_volume_definition,
"oo_combine_key_value": self.oo_combine_key_value,
"oo_combine_dict": self.oo_combine_dict,
"oo_split": self.oo_split,
"oo_filter_list": self.oo_filter_list,
"oo_parse_heat_stack_outputs": self.oo_parse_heat_stack_outputs
}
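# Illustrative Jinja2 usage of a few of the filters registered above (the input
# values are made up for the example; the output order of oo_combine_dict
# follows dict iteration order and is not guaranteed):
#
#   {{ my_dicts | oo_collect('a', filters={'z': 'z'}) }}
#   {{ ['cart', 'tree'] | oo_prepend_strings_in_list('apple-') }}  # -> ['apple-cart', 'apple-tree']
#   {{ {'region': 'infra', 'zone': 'default'} | oo_combine_dict }}  # -> "region=infra zone=default"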
| apache-2.0 | 7,927,040,593,778,460,000 | 1,099,497,424,227,918,300 | 39.656069 | 84 | 0.464065 | false |
updownlife/multipleK | dependencies/biopython-1.65/build/lib.linux-x86_64-2.7/Bio/PopGen/FDist/Utils.py | 3 | 6908 | # Copyright 2007 by Tiago Antao <tiagoantao@gmail.com>. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
from Bio.PopGen.GenePop import FileParser
import Bio.PopGen.FDist
# Quite a few utility functions could be done (like remove pop,
# add locus, etc...). The recommended strategy is convert back
# and forth from/to GenePop and use GenePop Utils
def convert_genepop_to_fdist(gp_rec, report_pops=None):
"""Converts a GenePop record to a FDist one.
Parameters:
gp_rec - Genepop Record (either standard or big)
Returns:
FDist record.
"""
if hasattr(gp_rec, "populations"):
return _convert_genepop_to_fdist(gp_rec)
else:
return _convert_genepop_to_fdist_big(gp_rec, report_pops)
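# Hedged usage sketch (the GenePop file name below is an assumption): parse a
# GenePop file with Bio.PopGen.GenePop and convert the resulting record so it
# can be fed to FDist.
#
#     from Bio.PopGen import GenePop
#     with open("sample.gen") as handle:
#         gp_rec = GenePop.read(handle)
#     fd_rec = convert_genepop_to_fdist(gp_rec)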
def _convert_genepop_to_fdist(gp_rec):
"""Converts a standard GenePop record to a FDist one.
Parameters:
gp_rec - Genepop Record (Standard)
Returns:
FDist record.
"""
fd_rec = Bio.PopGen.FDist.Record()
fd_rec.data_org = 0
fd_rec.num_loci = len(gp_rec.loci_list)
fd_rec.num_pops = len(gp_rec.populations)
for lc_i in range(len(gp_rec.loci_list)):
alleles = []
pop_data = []
for pop_i in range(len(gp_rec.populations)):
for indiv in gp_rec.populations[pop_i]:
for al in indiv[1][lc_i]:
if al is not None and al not in alleles:
alleles.append(al)
alleles.sort() # Dominance requires this
# here we go again (necessary...)
for pop_i in range(len(gp_rec.populations)):
allele_counts = {}
for indiv in gp_rec.populations[pop_i]:
for al in indiv[1][lc_i]:
if al is not None:
count = allele_counts.get(al, 0)
allele_counts[al] = count + 1
allele_array = [] # We need the same order as in alleles
for allele in alleles:
allele_array.append(allele_counts.get(allele, 0))
pop_data.append(allele_array)
fd_rec.loci_data.append((len(alleles), pop_data))
return fd_rec
def _convert_genepop_to_fdist_big(gp_rec, report_pops=None):
"""Converts a big GenePop record to a FDist one.
Parameters:
gp_rec - Genepop Record (Big)
Returns:
FDist record.
"""
fd_rec = Bio.PopGen.FDist.Record()
fd_rec.data_org = 1
fd_rec.num_loci = len(gp_rec.loci_list)
num_loci = len(gp_rec.loci_list)
loci = []
for i in range(num_loci):
loci.append(set())
pops = []
work_rec = FileParser.read(gp_rec.fname)
lParser = work_rec.get_individual()
def init_pop():
my_pop = []
for i in range(num_loci):
my_pop.append({})
return my_pop
curr_pop = init_pop()
num_pops = 1
if report_pops:
report_pops(num_pops)
while lParser:
if lParser is not True:
for loci_pos in range(num_loci):
for al in lParser[1][loci_pos]:
if al is not None:
loci[loci_pos].add(al)
curr_pop[loci_pos][al]= curr_pop[loci_pos].get(al, 0)+1
else:
pops.append(curr_pop)
num_pops += 1
if report_pops:
report_pops(num_pops)
curr_pop = init_pop()
lParser = work_rec.get_individual()
work_rec._handle.close() # TODO - Needs a proper fix
pops.append(curr_pop)
fd_rec.num_pops = num_pops
for loci_pos in range(num_loci):
alleles = sorted(loci[loci_pos])
loci_rec = [len(alleles), []]
for pop in pops:
pop_rec = []
for allele in alleles:
pop_rec.append(pop[loci_pos].get(allele, 0))
loci_rec[1].append(pop_rec)
fd_rec.loci_data.append(tuple(loci_rec))
return fd_rec
def _convert_genepop_to_fdist_big_old(gp_rec, report_loci=None):
"""Converts a big GenePop record to a FDist one.
Parameters:
gp_rec - Genepop Record (Big)
Returns:
FDist record.
"""
fd_rec = Bio.PopGen.FDist.Record()
def countPops(rec):
f2 = FileParser.read(rec.fname)
popCnt = 1
while f2.skip_population():
popCnt += 1
return popCnt
fd_rec.data_org = 0
fd_rec.num_loci = len(gp_rec.loci_list)
work_rec0 = FileParser.read(gp_rec.fname)
fd_rec.num_pops = countPops(work_rec0)
num_loci = len(gp_rec.loci_list)
for lc_i in range(num_loci):
if report_loci:
report_loci(lc_i, num_loci)
work_rec = FileParser.read(gp_rec.fname)
work_rec2 = FileParser.read(gp_rec.fname)
alleles = []
pop_data = []
lParser = work_rec.get_individual()
while lParser:
if lParser is not True:
for al in lParser[1][lc_i]:
if al is not None and al not in alleles:
alleles.append(al)
lParser = work_rec.get_individual()
# here we go again (necessary...)
alleles.sort()
def process_pop(pop_data, alleles, allele_counts):
allele_array = [] # We need the same order as in alleles
for allele in alleles:
allele_array.append(allele_counts.get(allele, 0))
pop_data.append(allele_array)
lParser = work_rec2.get_individual()
allele_counts = {}
for allele in alleles:
allele_counts[allele] = 0
allele_counts[None]=0
while lParser:
if lParser is True:
process_pop(pop_data, alleles, allele_counts)
allele_counts = {}
for allele in alleles:
allele_counts[allele] = 0
allele_counts[None]=0
else:
for al in lParser[1][lc_i]:
allele_counts[al] += 1
lParser = work_rec2.get_individual()
process_pop(pop_data, alleles, allele_counts)
fd_rec.loci_data.append((len(alleles), pop_data))
return fd_rec
def approximate_fst(desired_fst, simulated_fst, parameter_fst,
max_run_fst=1, min_run_fst=0, limit=0.005):
"""Calculates the next Fst attempt in order to approximate a
desired Fst.
"""
if abs(simulated_fst - desired_fst) < limit:
return parameter_fst, max_run_fst, min_run_fst
if simulated_fst > desired_fst:
max_run_fst = parameter_fst
next_parameter_fst = (min_run_fst + parameter_fst)/2
else:
min_run_fst = parameter_fst
next_parameter_fst = (max_run_fst + parameter_fst)/2
return next_parameter_fst, max_run_fst, min_run_fst
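# Hedged illustration of the intended call pattern (run_fdist_simulation is a
# hypothetical callable standing in for a real fdist run that returns the
# simulated Fst for a given parameter value):
#
#     desired, fst, max_fst, min_fst = 0.05, 0.1, 1, 0
#     while True:
#         simulated = run_fdist_simulation(fst)
#         next_fst, max_fst, min_fst = approximate_fst(desired, simulated, fst,
#                                                      max_fst, min_fst)
#         if next_fst == fst:  # unchanged parameter means we are within 'limit'
#             break
#         fst = next_fst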
| gpl-2.0 | -6,696,385,379,240,842,000 | 1,690,733,896,511,935,000 | 31.584906 | 79 | 0.567313 | false |
andrius-preimantas/odoo | addons/mrp/report/price.py | 39 | 11481 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
import openerp
from openerp.report.interface import report_rml
from openerp.tools import to_xml
from openerp.report import report_sxw
from datetime import datetime
from openerp.tools.translate import _
class report_custom(report_rml):
def create_xml(self, cr, uid, ids, datas, context=None):
number = (datas.get('form', False) and datas['form']['number']) or 1
registry = openerp.registry(cr.dbname)
product_pool = registry.get('product.product')
product_uom_pool = registry.get('product.uom')
workcenter_pool = registry.get('mrp.workcenter')
user_pool = registry.get('res.users')
bom_pool = registry.get('mrp.bom')
pricelist_pool = registry.get('product.pricelist')
rml_obj=report_sxw.rml_parse(cr, uid, product_pool._name,context)
rml_obj.localcontext.update({'lang':context.get('lang',False)})
company_currency = user_pool.browse(cr, uid, uid).company_id.currency_id
company_currency_symbol = company_currency.symbol or company_currency.name
def process_bom(bom, currency_id, factor=1):
xml = '<row>'
sum = 0
sum_strd = 0
prod = product_pool.browse(cr, uid, bom['product_id'])
prod_name = to_xml(bom['name'])
prod_qtty = factor * bom['product_qty']
product_uom = product_uom_pool.browse(cr, uid, bom['product_uom'], context=context)
product_uom_name = to_xml(product_uom.name)
main_sp_price, main_sp_name , main_strd_price = '','',''
sellers, sellers_price = '',''
if prod.seller_id:
main_sp_name = '- <b>'+ to_xml(prod.seller_id.name) +'</b>\r\n'
pricelist = prod.seller_id.property_product_pricelist_purchase
price = pricelist_pool.price_get(cr,uid,[pricelist.id],
prod.id, number*prod_qtty or 1.0, prod.seller_id.id, {
'uom': prod.uom_po_id.id,
'date': time.strftime('%Y-%m-%d'),
})[pricelist.id]
main_sp_price = """<b>"""+rml_obj.formatLang(price)+' '+ (company_currency_symbol)+"""</b>\r\n"""
sum += prod_qtty*price
std_price = product_uom_pool._compute_price(cr, uid, prod.uom_id.id, prod.standard_price, to_uom_id=product_uom.id)
main_strd_price = str(std_price) + '\r\n'
sum_strd = prod_qtty*std_price
for seller_id in prod.seller_ids:
if seller_id.name.id == prod.seller_id.id:
continue
sellers += '- <i>'+ to_xml(seller_id.name.name) +'</i>\r\n'
pricelist = seller_id.name.property_product_pricelist_purchase
price = pricelist_pool.price_get(cr,uid,[pricelist.id],
prod.id, number*prod_qtty or 1.0, seller_id.name.id, {
'uom': prod.uom_po_id.id,
'date': time.strftime('%Y-%m-%d'),
})[pricelist.id]
sellers_price += """<i>"""+rml_obj.formatLang(price) +' '+ (company_currency_symbol) +"""</i>\r\n"""
xml += """<col para='yes'> """+ prod_name +""" </col>
<col para='yes'> """+ main_sp_name + sellers + """ </col>
<col f='yes'>"""+ rml_obj.formatLang(prod_qtty) +' '+ product_uom_name +"""</col>
<col f='yes'>"""+ rml_obj.formatLang(float(main_strd_price)) +' '+ (company_currency_symbol) +"""</col>
<col f='yes'>""" + main_sp_price + sellers_price + """</col>'"""
xml += '</row>'
return xml, sum, sum_strd
def process_workcenter(wrk):
workcenter = workcenter_pool.browse(cr, uid, wrk['workcenter_id'])
cost_cycle = wrk['cycle']*workcenter.costs_cycle
cost_hour = wrk['hour']*workcenter.costs_hour
total = cost_cycle + cost_hour
xml = '<row>'
xml += "<col para='yes'>" + to_xml(workcenter.name) + '</col>'
xml += "<col/>"
xml += """<col f='yes'>"""+rml_obj.formatLang(cost_cycle)+' '+ (company_currency_symbol) + """</col>"""
xml += """<col f='yes'>"""+rml_obj.formatLang(cost_hour)+' '+ (company_currency_symbol) + """</col>"""
xml += """<col f='yes'>"""+rml_obj.formatLang(cost_hour + cost_cycle)+' '+ (company_currency_symbol) + """</col>"""
xml += '</row>'
return xml, total
xml = ''
config_start = """
<config>
<date>""" + to_xml(rml_obj.formatLang(datetime.now().strftime('%Y-%m-%d %H:%M:%S'),date_time=True)) + """</date>
<company>%s</company>
<PageSize>210.00mm,297.00mm</PageSize>
<PageWidth>595.27</PageWidth>
<PageHeight>841.88</PageHeight>
<tableSize>55.00mm,58.00mm,29.00mm,29.00mm,29.00mm</tableSize>
""" % to_xml(user_pool.browse(cr, uid, uid).company_id.name)
config_stop = """
<report-footer>Generated by OpenERP</report-footer>
</config>
"""
workcenter_header = """
<lines style='header'>
<row>
<col>%s</col>
<col t='yes'/>
<col t='yes'>%s</col>
<col t='yes'>%s</col>
<col t='yes'>%s</col>
</row>
</lines>
""" % (_('Work Center name'), _('Cycles Cost'), _('Hourly Cost'),_('Work Cost'))
prod_header = """
<row>
<col>%s</col>
<col>%s</col>
<col t='yes'>%s</col>
<col t='yes'>%s</col>
<col t='yes'>%s</col>
</row>
""" % (_('Components'), _('Components suppliers'), _('Quantity'),_('Cost Price per Unit of Measure'), _('Supplier Price per Unit of Measure'))
purchase_price_digits = rml_obj.get_digits(dp='Product Price')
for product in product_pool.browse(cr, uid, ids, context=context):
product_uom_name = to_xml(product.uom_id.name)
bom_id = bom_pool._bom_find(cr, uid, product.uom_id.id, product_id=product.id)
title = "<title>%s</title>" %(_("Cost Structure"))
title += "<title>%s</title>" % (to_xml(product.name))
xml += "<lines style='header'>" + title + prod_header + "</lines>"
if not bom_id:
total_strd = number * product.standard_price
total = number * product_pool.price_get(cr, uid, [product.id], 'standard_price')[product.id]
xml += """<lines style='lines'><row>
<col para='yes'>-</col>
<col para='yes'>-</col>
<col para='yes'>-</col>
<col para='yes'>-</col>
<col para='yes'>-</col>
</row></lines>"""
xml += """<lines style='total'> <row>
<col> """ + _('Total Cost of %s %s') % (str(number), product_uom_name) + """: </col>
<col/>
<col f='yes'/>
<col t='yes'>"""+ rml_obj.formatLang(total_strd, digits=purchase_price_digits) +' '+ (company_currency_symbol) + """</col>
<col t='yes'>"""+ rml_obj.formatLang(total, digits=purchase_price_digits) +' '+ (company_currency_symbol) + """</col>
</row></lines>'"""
else:
bom = bom_pool.browse(cr, uid, bom_id, context=context)
factor = number * product.uom_id.factor / bom.product_uom.factor
sub_boms = bom_pool._bom_explode(cr, uid, bom, product, factor / bom.product_qty)
total = 0
total_strd = 0
parent_bom = {
'product_qty': bom.product_qty,
'name': bom.product_id.name,
'product_uom': bom.product_uom.id,
'product_id': bom.product_id.id
}
xml_tmp = ''
for sub_bom in (sub_boms and sub_boms[0]) or [parent_bom]:
txt, sum, sum_strd = process_bom(sub_bom, company_currency.id)
xml_tmp += txt
total += sum
total_strd += sum_strd
xml += "<lines style='lines'>" + xml_tmp + '</lines>'
xml += """<lines style='sub_total'> <row>
<col> """ + _('Components Cost of %s %s') % (str(number), product_uom_name) + """: </col>
<col/>
<col t='yes'/>
<col t='yes'>"""+ rml_obj.formatLang(total_strd, digits=purchase_price_digits) +' '+ (company_currency_symbol) + """</col>
<col t='yes'></col>
</row></lines>'"""
total2 = 0
xml_tmp = ''
for wrk in (sub_boms and sub_boms[1]):
txt, sum = process_workcenter(wrk)
xml_tmp += txt
total2 += sum
if xml_tmp:
xml += workcenter_header
xml += "<lines style='lines'>" + xml_tmp + '</lines>'
xml += """<lines style='sub_total'> <row>
<col> """ + _('Work Cost of %s %s') % (str(number), product_uom_name) +""": </col>
<col/>
<col/>
<col/>
<col t='yes'>"""+ rml_obj.formatLang(total2, digits=purchase_price_digits) +' '+ (company_currency_symbol) +"""</col>
</row></lines>'"""
xml += """<lines style='total'> <row>
<col> """ + _('Total Cost of %s %s') % (str(number), product_uom_name) + """: </col>
<col/>
<col t='yes'/>
<col t='yes'>"""+ rml_obj.formatLang(total_strd+total2, digits=purchase_price_digits) +' '+ (company_currency_symbol) + """</col>
<col t='yes'></col>
</row></lines>'"""
xml = '<?xml version="1.0" ?><report>' + config_start + config_stop + xml + '</report>'
return xml
report_custom('report.product.price', 'product.product', '', 'addons/mrp/report/price.xsl')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -6,538,614,277,741,605,000 | 8,115,781,426,180,526,000 | 49.800885 | 150 | 0.489591 | false |
jobscore/sync-engine | inbox/test/api/test_auth.py | 3 | 1866 | import json
from base64 import b64encode
from inbox.test.util.base import generic_account, db # noqa
from inbox.test.api.base import new_api_client # noqa
def test_no_auth(db, generic_account): # noqa
# Because we're using the generic_account namespace
api_client = new_api_client(db, generic_account.namespace)
api_client.auth_header = {}
response = api_client.get_raw('/account')
assert response.status_code == 401
def test_basic_auth(db, generic_account): # noqa
api_client = new_api_client(db, generic_account.namespace)
response = api_client.get_raw('/account')
assert response.status_code == 200
resp_data = json.loads(response.data)
assert resp_data['id'] == generic_account.namespace.public_id
def test_bearer_token_auth(db, generic_account): # noqa
api_client = new_api_client(db, generic_account.namespace)
api_client.auth_header = {
'Authorization': 'Bearer {}'
.format(generic_account.namespace.public_id)}
response = api_client.get_raw('/account')
assert response.status_code == 200
resp_data = json.loads(response.data)
assert resp_data['id'] == generic_account.namespace.public_id
BAD_TOKEN = '1234567890abcdefg'
def test_invalid_basic_auth(db, generic_account): # noqa
api_client = new_api_client(db, generic_account.namespace)
api_client.auth_header = {'Authorization': 'Basic {}'
.format(b64encode(BAD_TOKEN + ':'))}
response = api_client.get_raw('/account')
assert response.status_code == 401
def test_invalid_bearer_token_auth(db, generic_account): # noqa
api_client = new_api_client(db, generic_account.namespace)
api_client.auth_header = {
'Authorization': 'Bearer {}'.format(BAD_TOKEN)}
response = api_client.get_raw('/account')
assert response.status_code == 401
| agpl-3.0 | 2,818,497,296,905,191,400 | 7,188,707,627,082,039,000 | 30.1 | 66 | 0.678992 | false |
utecht/random-lines | random-lines.py | 1 | 1121 | #!/usr/bin/env python
import random
import argparse
import sys
parser = argparse.ArgumentParser(description='Return random lines of file')
parser.add_argument('file', type=argparse.FileType('r'), help='the input file')
parser.add_argument('-n', '--num', type=int, help='number of lines to return')
parser.add_argument('-p', '--percent', type=float, help='percent of lines to return, i.e. 0.1 for 10 percent')
parser.add_argument('-o', '--output', type=argparse.FileType('w'), help='an output file')
args = parser.parse_args()
if args.num is None and args.percent is None:
print('Need a num or percent')
exit(1)
elif args.num and args.percent:
print('Only pass a num or a percent')
exit(1)
lines_pulled = 0
num_lines = sum(1 for line in open(args.file.name))
if args.num:
lines_pulled = args.num
elif args.percent:
lines_pulled = int(num_lines * args.percent)
if args.output:
output = args.output
else:
output = sys.stdout
# sample distinct zero-based line indices so the requested number of lines is
# returned and the first line of the file is not silently excluded
pull_lines = set(random.sample(range(num_lines), min(lines_pulled, num_lines)))
for i, line in enumerate(args.file):
    if i in pull_lines:
        output.write(line)
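# Example invocations (file names are hypothetical):
#   ./random-lines.py -n 100 corpus.txt
#   ./random-lines.py -p 0.1 corpus.txt -o sample.txt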
| gpl-2.0 | 6,262,615,483,473,180,000 | -123,028,346,238,177,150 | 30.138889 | 110 | 0.693131 | false |
ptisserand/ansible | lib/ansible/modules/cloud/amazon/route53_health_check.py | 23 | 13078 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: route53_health_check
short_description: add or delete health-checks in Amazon's Route53 DNS service
description:
    - Creates and deletes DNS health checks in Amazon's Route53 service
- Only the port, resource_path, string_match and request_interval are
considered when updating existing health-checks.
version_added: "2.0"
options:
state:
description:
- Specifies the action to take.
required: true
choices: [ 'present', 'absent' ]
ip_address:
description:
- IP address of the end-point to check. Either this or `fqdn` has to be
provided.
port:
description:
- The port on the endpoint on which you want Amazon Route 53 to perform
health checks. Required for TCP checks.
type:
description:
- The type of health check that you want to create, which indicates how
Amazon Route 53 determines whether an endpoint is healthy.
required: true
choices: [ 'HTTP', 'HTTPS', 'HTTP_STR_MATCH', 'HTTPS_STR_MATCH', 'TCP' ]
resource_path:
description:
- The path that you want Amazon Route 53 to request when performing
health checks. The path can be any value for which your endpoint will
return an HTTP status code of 2xx or 3xx when the endpoint is healthy,
for example the file /docs/route53-health-check.html.
- Required for all checks except TCP.
- The path must begin with a /
- Maximum 255 characters.
fqdn:
description:
- Domain name of the endpoint to check. Either this or `ip_address` has
to be provided. When both are given the `fqdn` is used in the `Host:`
header of the HTTP request.
string_match:
description:
      - If the check type is HTTP_STR_MATCH or HTTPS_STR_MATCH, the string
that you want Amazon Route 53 to search for in the response body from
the specified resource. If the string appears in the first 5120 bytes
of the response body, Amazon Route 53 considers the resource healthy.
request_interval:
description:
- The number of seconds between the time that Amazon Route 53 gets a
response from your endpoint and the time that it sends the next
health-check request.
required: true
default: 30
choices: [ 10, 30 ]
failure_threshold:
description:
- The number of consecutive health checks that an endpoint must pass or
fail for Amazon Route 53 to change the current status of the endpoint
from unhealthy to healthy or vice versa.
required: true
default: 3
choices: [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ]
author: "zimbatm (@zimbatm)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Create a health-check for host1.example.com and use it in record
- route53_health_check:
state: present
fqdn: host1.example.com
type: HTTP_STR_MATCH
resource_path: /
string_match: "Hello"
request_interval: 10
failure_threshold: 2
register: my_health_check
- route53:
action: create
zone: "example.com"
type: CNAME
record: "www.example.com"
value: host1.example.com
ttl: 30
# Routing policy
identifier: "host1@www"
weight: 100
health_check: "{{ my_health_check.health_check.id }}"
# Delete health-check
- route53_health_check:
state: absent
fqdn: host1.example.com
'''
import uuid
try:
import boto
import boto.ec2
from boto import route53
from boto.route53 import Route53Connection, exception
from boto.route53.healthcheck import HealthCheck
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import ec2_argument_spec, get_aws_connection_info
# Things that can't get changed:
# protocol
# ip_address or domain
# request_interval
# string_match if not previously enabled
def find_health_check(conn, wanted):
"""Searches for health checks that have the exact same set of immutable values"""
for check in conn.get_list_health_checks().HealthChecks:
config = check.HealthCheckConfig
if (
config.get('IPAddress') == wanted.ip_addr and
config.get('FullyQualifiedDomainName') == wanted.fqdn and
config.get('Type') == wanted.hc_type and
config.get('RequestInterval') == str(wanted.request_interval) and
config.get('Port') == str(wanted.port)
):
return check
return None
def to_health_check(config):
return HealthCheck(
config.get('IPAddress'),
int(config.get('Port')),
config.get('Type'),
config.get('ResourcePath'),
fqdn=config.get('FullyQualifiedDomainName'),
string_match=config.get('SearchString'),
request_interval=int(config.get('RequestInterval')),
failure_threshold=int(config.get('FailureThreshold')),
)
def health_check_diff(a, b):
a = a.__dict__
b = b.__dict__
if a == b:
return {}
diff = {}
for key in set(a.keys()) | set(b.keys()):
if a.get(key) != b.get(key):
diff[key] = b.get(key)
return diff
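# Hedged illustration of the diff semantics (addresses and thresholds are
# made-up values): only keys whose values differ are reported, with the value
# taken from the second argument.
#
#     a = HealthCheck('192.0.2.10', 80, 'HTTP', '/', failure_threshold=3)
#     b = HealthCheck('192.0.2.10', 80, 'HTTP', '/', failure_threshold=5)
#     health_check_diff(a, b)  # -> {'failure_threshold': 5}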
def to_template_params(health_check):
params = {
'ip_addr_part': '',
'port': health_check.port,
'type': health_check.hc_type,
'resource_path_part': '',
'fqdn_part': '',
'string_match_part': '',
'request_interval': health_check.request_interval,
'failure_threshold': health_check.failure_threshold,
}
if health_check.ip_addr:
params['ip_addr_part'] = HealthCheck.XMLIpAddrPart % {'ip_addr': health_check.ip_addr}
if health_check.resource_path:
params['resource_path_part'] = XMLResourcePathPart % {'resource_path': health_check.resource_path}
if health_check.fqdn:
params['fqdn_part'] = HealthCheck.XMLFQDNPart % {'fqdn': health_check.fqdn}
if health_check.string_match:
params['string_match_part'] = HealthCheck.XMLStringMatchPart % {'string_match': health_check.string_match}
return params
XMLResourcePathPart = """<ResourcePath>%(resource_path)s</ResourcePath>"""
POSTXMLBody = """
<CreateHealthCheckRequest xmlns="%(xmlns)s">
<CallerReference>%(caller_ref)s</CallerReference>
<HealthCheckConfig>
%(ip_addr_part)s
<Port>%(port)s</Port>
<Type>%(type)s</Type>
%(resource_path_part)s
%(fqdn_part)s
%(string_match_part)s
<RequestInterval>%(request_interval)s</RequestInterval>
<FailureThreshold>%(failure_threshold)s</FailureThreshold>
</HealthCheckConfig>
</CreateHealthCheckRequest>
"""
UPDATEHCXMLBody = """
<UpdateHealthCheckRequest xmlns="%(xmlns)s">
<HealthCheckVersion>%(health_check_version)s</HealthCheckVersion>
%(ip_addr_part)s
<Port>%(port)s</Port>
%(resource_path_part)s
%(fqdn_part)s
%(string_match_part)s
<FailureThreshold>%(failure_threshold)i</FailureThreshold>
</UpdateHealthCheckRequest>
"""
def create_health_check(conn, health_check, caller_ref=None):
if caller_ref is None:
caller_ref = str(uuid.uuid4())
uri = '/%s/healthcheck' % conn.Version
params = to_template_params(health_check)
params.update(xmlns=conn.XMLNameSpace, caller_ref=caller_ref)
xml_body = POSTXMLBody % params
response = conn.make_request('POST', uri, {'Content-Type': 'text/xml'}, xml_body)
body = response.read()
boto.log.debug(body)
if response.status == 201:
e = boto.jsonresponse.Element()
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
else:
raise exception.DNSServerError(response.status, response.reason, body)
def update_health_check(conn, health_check_id, health_check_version, health_check):
uri = '/%s/healthcheck/%s' % (conn.Version, health_check_id)
params = to_template_params(health_check)
params.update(
xmlns=conn.XMLNameSpace,
health_check_version=health_check_version,
)
xml_body = UPDATEHCXMLBody % params
response = conn.make_request('POST', uri, {'Content-Type': 'text/xml'}, xml_body)
body = response.read()
boto.log.debug(body)
if response.status not in (200, 204):
raise exception.DNSServerError(response.status,
response.reason,
body)
e = boto.jsonresponse.Element()
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(choices=['present', 'absent'], default='present'),
ip_address=dict(),
port=dict(type='int'),
type=dict(required=True, choices=['HTTP', 'HTTPS', 'HTTP_STR_MATCH', 'HTTPS_STR_MATCH', 'TCP']),
resource_path=dict(),
fqdn=dict(),
string_match=dict(),
request_interval=dict(type='int', choices=[10, 30], default=30),
failure_threshold=dict(type='int', choices=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10], default=3),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto 2.27.0+ required for this module')
state_in = module.params.get('state')
ip_addr_in = module.params.get('ip_address')
port_in = module.params.get('port')
type_in = module.params.get('type')
resource_path_in = module.params.get('resource_path')
fqdn_in = module.params.get('fqdn')
string_match_in = module.params.get('string_match')
request_interval_in = module.params.get('request_interval')
failure_threshold_in = module.params.get('failure_threshold')
if ip_addr_in is None and fqdn_in is None:
module.fail_json(msg="parameter 'ip_address' or 'fqdn' is required")
# Default port
if port_in is None:
if type_in in ['HTTP', 'HTTP_STR_MATCH']:
port_in = 80
elif type_in in ['HTTPS', 'HTTPS_STR_MATCH']:
port_in = 443
else:
module.fail_json(msg="parameter 'port' is required for 'type' TCP")
# string_match in relation with type
if type_in in ['HTTP_STR_MATCH', 'HTTPS_STR_MATCH']:
if string_match_in is None:
module.fail_json(msg="parameter 'string_match' is required for the HTTP(S)_STR_MATCH types")
elif len(string_match_in) > 255:
module.fail_json(msg="parameter 'string_match' is limited to 255 characters max")
elif string_match_in:
module.fail_json(msg="parameter 'string_match' argument is only for the HTTP(S)_STR_MATCH types")
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
# connect to the route53 endpoint
try:
conn = Route53Connection(**aws_connect_kwargs)
except boto.exception.BotoServerError as e:
module.fail_json(msg=e.error_message)
changed = False
action = None
check_id = None
wanted_config = HealthCheck(ip_addr_in, port_in, type_in, resource_path_in, fqdn_in, string_match_in, request_interval_in, failure_threshold_in)
existing_check = find_health_check(conn, wanted_config)
if existing_check:
check_id = existing_check.Id
existing_config = to_health_check(existing_check.HealthCheckConfig)
if state_in == 'present':
if existing_check is None:
action = "create"
check_id = create_health_check(conn, wanted_config).HealthCheck.Id
changed = True
else:
diff = health_check_diff(existing_config, wanted_config)
if diff:
action = "update"
update_health_check(conn, existing_check.Id, int(existing_check.HealthCheckVersion), wanted_config)
changed = True
elif state_in == 'absent':
if check_id:
action = "delete"
conn.delete_health_check(check_id)
changed = True
else:
module.fail_json(msg="Logic Error: Unknown state")
module.exit_json(changed=changed, health_check=dict(id=check_id), action=action)
if __name__ == '__main__':
main()
| gpl-3.0 | 5,562,700,793,096,470,000 | 3,663,729,821,102,002,000 | 34.538043 | 148 | 0.644288 | false |
ldiary/pytest-testbook | setup.py | 1 | 1591 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import codecs
from setuptools import setup
directory_name = os.path.dirname(__file__)
with codecs.open(os.path.join(directory_name, 'pytest_testbook', '__init__.py'), encoding='utf-8') as fd:
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
def read(fname):
file_path = os.path.join(directory_name, fname)
return codecs.open(file_path, encoding='utf-8').read()
setup(
name='pytest-testbook',
version=VERSION,
author='Ernesto D. Luzon Jr.',
author_email='raise_a_bug_in_myrepo@github.com',
maintainer='Ernesto D. Luzon Jr.',
maintainer_email='please_raise_a_bug_in_myrepo@github.com',
license='MIT',
url='https://github.com/ldiary/pytest-testbook',
description='A plugin to run tests written in Jupyter notebook',
long_description=read('README.rst'),
packages=["pytest_testbook"],
install_requires=[
'marigoso',
'jupyter',
],
classifiers=[
"Development Status :: 3 - Alpha",
'Framework :: Pytest',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
entry_points={
'pytest11': [
'testbook = pytest_testbook.plugin',
],
},
)
| mit | -1,820,886,716,997,720,300 | 3,207,419,318,121,972,000 | 30.196078 | 105 | 0.616593 | false |
piranna/pyfilesystem | fs/s3fs.py | 7 | 29233 | """
fs.s3fs
=======
**Currently only avaiable on Python2 due to boto not being available for Python3**
FS subclass accessing files in Amazon S3
This module provides the class 'S3FS', which implements the FS filesystem
interface for objects stored in Amazon Simple Storage Service (S3).
"""
import os
import datetime
import tempfile
from fnmatch import fnmatch
import stat as statinfo
import boto.s3.connection
from boto.s3.prefix import Prefix
from boto.exception import S3ResponseError
from fs.base import *
from fs.path import *
from fs.errors import *
from fs.remote import *
from fs.filelike import LimitBytesFile
from fs import iotools
import six
# Boto is not thread-safe, so we need to use a per-thread S3 connection.
if hasattr(threading,"local"):
thread_local = threading.local
else:
class thread_local(object):
def __init__(self):
self._map = {}
def __getattr__(self,attr):
try:
return self._map[(threading.currentThread(),attr)]
except KeyError:
raise AttributeError, attr
def __setattr__(self,attr,value):
self._map[(threading.currentThread(),attr)] = value
class S3FS(FS):
"""A filesystem stored in Amazon S3.
This class provides the FS interface for files stored in Amazon's Simple
Storage Service (S3). It should be instantiated with the name of the
S3 bucket to use, and optionally a prefix under which the files should
be stored.
Local temporary files are used when opening files from this filesystem,
and any changes are only pushed back into S3 when the files are closed
or flushed.
"""
_meta = {'thread_safe': True,
'virtual': False,
'read_only': False,
'unicode_paths': True,
'case_insensitive_paths': False,
'network': True,
'atomic.move': True,
'atomic.copy': True,
'atomic.makedir': True,
'atomic.rename': False,
'atomic.setcontents': True
}
class meta:
PATH_MAX = None
NAME_MAX = None
def __init__(self, bucket, prefix="", aws_access_key=None, aws_secret_key=None, separator="/", thread_synchronize=True, key_sync_timeout=1):
"""Constructor for S3FS objects.
S3FS objects require the name of the S3 bucket in which to store
files, and can optionally be given a prefix under which the files
should be stored. The AWS public and private keys may be specified
as additional arguments; if they are not specified they will be
read from the two environment variables AWS_ACCESS_KEY_ID and
AWS_SECRET_ACCESS_KEY.
The keyword argument 'key_sync_timeout' specifies the maximum
time in seconds that the filesystem will spend trying to confirm
that a newly-uploaded S3 key is available for reading. For no
timeout set it to zero. To disable these checks entirely (and
thus reduce the filesystem's consistency guarantees to those of
S3's "eventual consistency" model) set it to None.
By default the path separator is "/", but this can be overridden
by specifying the keyword 'separator' in the constructor.
"""
self._bucket_name = bucket
self._access_keys = (aws_access_key,aws_secret_key)
self._separator = separator
self._key_sync_timeout = key_sync_timeout
# Normalise prefix to this form: path/to/files/
prefix = normpath(prefix)
while prefix.startswith(separator):
prefix = prefix[1:]
if not prefix.endswith(separator) and prefix != "":
prefix = prefix + separator
if isinstance(prefix,unicode):
prefix = prefix.encode("utf8")
if aws_access_key is None:
if "AWS_ACCESS_KEY_ID" not in os.environ:
raise CreateFailedError("AWS_ACCESS_KEY_ID not set")
if aws_secret_key is None:
if "AWS_SECRET_ACCESS_KEY" not in os.environ:
raise CreateFailedError("AWS_SECRET_ACCESS_KEY not set")
self._prefix = prefix
self._tlocal = thread_local()
super(S3FS, self).__init__(thread_synchronize=thread_synchronize)
# Make _s3conn and _s3bukt properties that are created on demand,
# since they cannot be stored during pickling.
def _s3conn(self):
try:
(c,ctime) = self._tlocal.s3conn
if time.time() - ctime > 60:
raise AttributeError
return c
except AttributeError:
c = boto.s3.connection.S3Connection(*self._access_keys)
self._tlocal.s3conn = (c,time.time())
return c
_s3conn = property(_s3conn)
def _s3bukt(self):
try:
(b,ctime) = self._tlocal.s3bukt
if time.time() - ctime > 60:
raise AttributeError
return b
except AttributeError:
try:
# Validate by listing the bucket if there is no prefix.
# If there is a prefix, validate by listing only the prefix
# itself, to avoid errors when an IAM policy has been applied.
if self._prefix:
b = self._s3conn.get_bucket(self._bucket_name, validate=0)
b.get_key(self._prefix)
else:
b = self._s3conn.get_bucket(self._bucket_name, validate=1)
except S3ResponseError, e:
if "404 Not Found" not in str(e):
raise
b = self._s3conn.create_bucket(self._bucket_name)
self._tlocal.s3bukt = (b,time.time())
return b
_s3bukt = property(_s3bukt)
def __getstate__(self):
state = super(S3FS,self).__getstate__()
del state['_tlocal']
return state
def __setstate__(self,state):
super(S3FS,self).__setstate__(state)
self._tlocal = thread_local()
def __repr__(self):
args = (self.__class__.__name__,self._bucket_name,self._prefix)
return '<%s: %s:%s>' % args
__str__ = __repr__
def _s3path(self,path):
"""Get the absolute path to a file stored in S3."""
path = relpath(normpath(path))
path = self._separator.join(iteratepath(path))
s3path = self._prefix + path
if s3path and s3path[-1] == self._separator:
s3path = s3path[:-1]
if isinstance(s3path,unicode):
s3path = s3path.encode("utf8")
return s3path
def _uns3path(self,s3path,roots3path=None):
"""Get the local path for a file stored in S3.
This is essentially the opposite of self._s3path().
"""
if roots3path is None:
roots3path = self._s3path("")
i = len(roots3path)
return s3path[i:]
def _sync_key(self,k):
"""Synchronise on contents of the given key.
Since S3 only offers "eventual consistency" of data, it is possible
to create a key but be unable to read it back straight away. This
method works around that limitation by polling the key until it reads
back the value expected by the given key.
Note that this could easily fail if the key is modified by another
program, meaning the content will never be as specified in the given
        key. This is the reason for the timeout argument to the constructor.
"""
timeout = self._key_sync_timeout
if timeout is None:
return k
k2 = self._s3bukt.get_key(k.name)
t = time.time()
while k2 is None or k2.etag != k.etag:
if timeout > 0:
if t + timeout < time.time():
break
time.sleep(0.1)
k2 = self._s3bukt.get_key(k.name)
return k2
def _sync_set_contents(self,key,contents):
"""Synchronously set the contents of a key."""
if isinstance(key,basestring):
key = self._s3bukt.new_key(key)
if isinstance(contents,basestring):
key.set_contents_from_string(contents)
elif hasattr(contents,"md5"):
hexmd5 = contents.md5
b64md5 = hexmd5.decode("hex").encode("base64").strip()
key.set_contents_from_file(contents,md5=(hexmd5,b64md5))
else:
try:
contents.seek(0)
except (AttributeError,EnvironmentError):
tf = tempfile.TemporaryFile()
data = contents.read(524288)
while data:
tf.write(data)
data = contents.read(524288)
tf.seek(0)
key.set_contents_from_file(tf)
else:
key.set_contents_from_file(contents)
return self._sync_key(key)
def makepublic(self, path):
"""Mark given path as publicly accessible using HTTP(S)"""
s3path = self._s3path(path)
k = self._s3bukt.get_key(s3path)
k.make_public()
def getpathurl(self, path, allow_none=False, expires=3600):
"""Returns a url that corresponds to the given path."""
s3path = self._s3path(path)
k = self._s3bukt.get_key(s3path)
# Is there AllUsers group with READ permissions?
is_public = True in [grant.permission == 'READ' and
grant.uri == 'http://acs.amazonaws.com/groups/global/AllUsers'
for grant in k.get_acl().acl.grants]
url = k.generate_url(expires, force_http=is_public)
if url == None:
if not allow_none:
raise NoPathURLError(path=path)
return None
if is_public:
# Strip time token; it has no sense for public resource
url = url.split('?')[0]
return url
def setcontents(self, path, data=b'', encoding=None, errors=None, chunk_size=64*1024):
s3path = self._s3path(path)
if isinstance(data, six.text_type):
data = data.encode(encoding=encoding, errors=errors)
self._sync_set_contents(s3path, data)
@iotools.filelike_to_stream
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
"""Open the named file in the given mode.
This method downloads the file contents into a local temporary file
so that it can be worked on efficiently. Any changes made to the
file are only sent back to S3 when the file is flushed or closed.
"""
if self.isdir(path):
raise ResourceInvalidError(path)
s3path = self._s3path(path)
# Truncate the file if requested
if "w" in mode:
k = self._sync_set_contents(s3path,"")
else:
k = self._s3bukt.get_key(s3path)
if k is None:
# Create the file if it's missing
if "w" not in mode and "a" not in mode:
raise ResourceNotFoundError(path)
if not self.isdir(dirname(path)):
raise ParentDirectoryMissingError(path)
k = self._sync_set_contents(s3path,"")
# Make sure nothing tries to read past end of socket data
f = LimitBytesFile(k.size,k,"r")
# For streaming reads, return the key object directly
if mode == "r-":
return f
# For everything else, use a RemoteFileBuffer.
# This will take care of closing the socket when it's done.
return RemoteFileBuffer(self,path,mode,f)
def exists(self,path):
"""Check whether a path exists."""
s3path = self._s3path(path)
s3pathD = s3path + self._separator
# The root directory always exists
if self._prefix.startswith(s3path):
return True
ks = self._s3bukt.list(prefix=s3path,delimiter=self._separator)
for k in ks:
# A regular file
if _eq_utf8(k.name,s3path):
return True
# A directory
if _eq_utf8(k.name,s3pathD):
return True
return False
def isdir(self,path):
"""Check whether a path exists and is a directory."""
s3path = self._s3path(path) + self._separator
# Root is always a directory
if s3path == "/" or s3path == self._prefix:
return True
# Use a list request so that we return true if there are any files
# in that directory. This avoids requiring a special file for the
        # directory itself, which other tools may not create.
ks = self._s3bukt.list(prefix=s3path,delimiter=self._separator)
try:
iter(ks).next()
except StopIteration:
return False
else:
return True
def isfile(self,path):
"""Check whether a path exists and is a regular file."""
s3path = self._s3path(path)
# Root is never a file
if self._prefix.startswith(s3path):
return False
k = self._s3bukt.get_key(s3path)
if k is not None:
return True
return False
def listdir(self,path="./",wildcard=None,full=False,absolute=False,
dirs_only=False,files_only=False):
"""List contents of a directory."""
return list(self.ilistdir(path,wildcard,full,absolute,
dirs_only,files_only))
def listdirinfo(self,path="./",wildcard=None,full=False,absolute=False,
dirs_only=False,files_only=False):
return list(self.ilistdirinfo(path,wildcard,full,absolute,
dirs_only,files_only))
def ilistdir(self,path="./",wildcard=None,full=False,absolute=False,
dirs_only=False,files_only=False):
"""List contents of a directory."""
keys = self._iter_keys(path)
entries = self._filter_keys(path,keys,wildcard,full,absolute,
dirs_only,files_only)
return (nm for (nm,k) in entries)
def ilistdirinfo(self,path="./",wildcard=None,full=False,absolute=False,
dirs_only=False,files_only=False):
keys = self._iter_keys(path)
entries = self._filter_keys(path,keys,wildcard,full,absolute,
dirs_only,files_only)
return ((nm,self._get_key_info(k,nm)) for (nm,k) in entries)
def _iter_keys(self,path):
"""Iterator over keys contained in the given directory.
This generator yields (name,key) pairs for each entry in the given
        directory. If the path is not a directory, it raises the appropriate
error.
"""
s3path = self._s3path(path) + self._separator
if s3path == "/":
s3path = ""
isDir = False
for k in self._s3bukt.list(prefix=s3path,delimiter=self._separator):
if not isDir:
isDir = True
# Skip over the entry for the directory itself, if it exists
name = self._uns3path(k.name,s3path)
if name != "":
if not isinstance(name,unicode):
name = name.decode("utf8")
if name.endswith(self._separator):
name = name[:-1]
yield (name,k)
if not isDir:
if s3path != self._prefix:
if self.isfile(path):
msg = "that's not a directory: %(path)s"
raise ResourceInvalidError(path,msg=msg)
raise ResourceNotFoundError(path)
def _key_is_dir(self, k):
if isinstance(k,Prefix):
return True
if k.name.endswith(self._separator):
return True
return False
def _filter_keys(self,path,keys,wildcard,full,absolute,
dirs_only,files_only):
"""Filter out keys not matching the given criteria.
Given a (name,key) iterator as returned by _iter_keys, this method
applies the given filtering criteria and returns a filtered iterator.
"""
sep = self._separator
if dirs_only and files_only:
raise ValueError("dirs_only and files_only can not both be True")
if dirs_only:
keys = ((nm,k) for (nm,k) in keys if self._key_is_dir(k))
elif files_only:
keys = ((nm,k) for (nm,k) in keys if not self._key_is_dir(k))
if wildcard is not None:
if callable(wildcard):
keys = ((nm,k) for (nm,k) in keys if wildcard(nm))
else:
keys = ((nm,k) for (nm,k) in keys if fnmatch(nm,wildcard))
if full:
return ((relpath(pathjoin(path, nm)),k) for (nm,k) in keys)
elif absolute:
return ((abspath(pathjoin(path, nm)),k) for (nm,k) in keys)
return keys
def makedir(self,path,recursive=False,allow_recreate=False):
"""Create a directory at the given path.
The 'mode' argument is accepted for compatibility with the standard
FS interface, but is currently ignored.
"""
s3path = self._s3path(path)
s3pathD = s3path + self._separator
if s3pathD == self._prefix:
if allow_recreate:
return
msg = "Can not create a directory that already exists"\
" (try allow_recreate=True): %(path)s"
raise DestinationExistsError(path, msg=msg)
s3pathP = self._s3path(dirname(path))
if s3pathP:
s3pathP = s3pathP + self._separator
# Check various preconditions using list of parent dir
ks = self._s3bukt.list(prefix=s3pathP,delimiter=self._separator)
if s3pathP == self._prefix:
parentExists = True
else:
parentExists = False
for k in ks:
if not parentExists:
parentExists = True
if _eq_utf8(k.name,s3path):
# It's already a file
msg = "Destination exists as a regular file: %(path)s"
raise ResourceInvalidError(path, msg=msg)
if _eq_utf8(k.name,s3pathD):
# It's already a directory
if allow_recreate:
return
msg = "Can not create a directory that already exists"\
" (try allow_recreate=True): %(path)s"
raise DestinationExistsError(path, msg=msg)
# Create parent if required
if not parentExists:
if recursive:
self.makedir(dirname(path),recursive,allow_recreate)
else:
msg = "Parent directory does not exist: %(path)s"
raise ParentDirectoryMissingError(path, msg=msg)
# Create an empty file representing the directory
self._sync_set_contents(s3pathD,"")
def remove(self,path):
"""Remove the file at the given path."""
s3path = self._s3path(path)
ks = self._s3bukt.list(prefix=s3path,delimiter=self._separator)
for k in ks:
if _eq_utf8(k.name,s3path):
break
if _startswith_utf8(k.name,s3path + "/"):
msg = "that's not a file: %(path)s"
raise ResourceInvalidError(path,msg=msg)
else:
raise ResourceNotFoundError(path)
self._s3bukt.delete_key(s3path)
k = self._s3bukt.get_key(s3path)
while k:
k = self._s3bukt.get_key(s3path)
def removedir(self,path,recursive=False,force=False):
"""Remove the directory at the given path."""
if normpath(path) in ('', '/'):
raise RemoveRootError(path)
s3path = self._s3path(path)
if s3path != self._prefix:
s3path = s3path + self._separator
if force:
# If we will be forcibly removing any directory contents, we
# might as well get the un-delimited list straight away.
ks = self._s3bukt.list(prefix=s3path)
else:
ks = self._s3bukt.list(prefix=s3path,delimiter=self._separator)
# Fail if the directory is not empty, or remove them if forced
found = False
for k in ks:
found = True
if not _eq_utf8(k.name,s3path):
if not force:
raise DirectoryNotEmptyError(path)
self._s3bukt.delete_key(k.name)
if not found:
if self.isfile(path):
msg = "removedir() called on a regular file: %(path)s"
raise ResourceInvalidError(path,msg=msg)
if path not in ("","/"):
raise ResourceNotFoundError(path)
self._s3bukt.delete_key(s3path)
if recursive and path not in ("","/"):
pdir = dirname(path)
try:
self.removedir(pdir,recursive=True,force=False)
except DirectoryNotEmptyError:
pass
def rename(self,src,dst):
"""Rename the file at 'src' to 'dst'."""
# Actually, in S3 'rename' is exactly the same as 'move'
if self.isfile(src):
self.move(src,dst)
else:
self.movedir(src,dst)
def getinfo(self,path):
s3path = self._s3path(path)
if path in ("","/"):
k = Prefix(bucket=self._s3bukt,name="/")
else:
k = self._s3bukt.get_key(s3path)
if k is None:
ks = self._s3bukt.list(prefix=s3path,delimiter=self._separator)
for k in ks:
if isinstance(k,Prefix):
break
else:
raise ResourceNotFoundError(path)
return self._get_key_info(k,path)
def _get_key_info(self,key,name=None):
info = {}
if name is not None:
info["name"] = basename(name)
else:
info["name"] = basename(self._uns3key(k.name))
if self._key_is_dir(key):
info["st_mode"] = 0700 | statinfo.S_IFDIR
else:
info["st_mode"] = 0700 | statinfo.S_IFREG
if hasattr(key,"size"):
info['size'] = int(key.size)
etag = getattr(key,"etag",None)
if etag is not None:
if isinstance(etag,unicode):
etag = etag.encode("utf8")
info['etag'] = etag.strip('"').strip("'")
if hasattr(key,"last_modified"):
# TODO: does S3 use any other formats?
fmt = "%a, %d %b %Y %H:%M:%S %Z"
try:
mtime = datetime.datetime.strptime(key.last_modified,fmt)
info['modified_time'] = mtime
except ValueError:
pass
return info
def desc(self,path):
return "No description available"
def copy(self,src,dst,overwrite=False,chunk_size=16384):
"""Copy a file from 'src' to 'dst'.
src -- The source path
dst -- The destination path
overwrite -- If True, then the destination may be overwritten
(if a file exists at that location). If False then an exception will be
thrown if the destination exists
chunk_size -- Size of chunks to use in copy (ignored by S3)
"""
s3path_dst = self._s3path(dst)
s3path_dstD = s3path_dst + self._separator
# Check for various preconditions.
ks = self._s3bukt.list(prefix=s3path_dst,delimiter=self._separator)
dstOK = False
for k in ks:
# It exists as a regular file
if _eq_utf8(k.name,s3path_dst):
if not overwrite:
raise DestinationExistsError(dst)
dstOK = True
break
# Check if it refers to a directory. If so, we copy *into* it.
# Since S3 lists in lexicographic order, subsequent iterations
# of the loop will check for the existence of the new filename.
if _eq_utf8(k.name,s3path_dstD):
nm = basename(src)
dst = pathjoin(dirname(dst),nm)
s3path_dst = s3path_dstD + nm
dstOK = True
if not dstOK and not self.isdir(dirname(dst)):
msg = "Destination directory does not exist: %(path)s"
raise ParentDirectoryMissingError(dst,msg=msg)
# OK, now we can copy the file.
s3path_src = self._s3path(src)
try:
self._s3bukt.copy_key(s3path_dst,self._bucket_name,s3path_src)
except S3ResponseError, e:
if "404 Not Found" in str(e):
msg = "Source is not a file: %(path)s"
raise ResourceInvalidError(src, msg=msg)
raise e
else:
k = self._s3bukt.get_key(s3path_dst)
while k is None:
k = self._s3bukt.get_key(s3path_dst)
self._sync_key(k)
def move(self,src,dst,overwrite=False,chunk_size=16384):
"""Move a file from one location to another."""
self.copy(src,dst,overwrite=overwrite)
self._s3bukt.delete_key(self._s3path(src))
def walkfiles(self,
path="/",
wildcard=None,
dir_wildcard=None,
search="breadth",
ignore_errors=False ):
if search != "breadth" or dir_wildcard is not None:
args = (wildcard,dir_wildcard,search,ignore_errors)
for item in super(S3FS,self).walkfiles(path,*args):
yield item
else:
prefix = self._s3path(path)
for k in self._s3bukt.list(prefix=prefix):
name = relpath(self._uns3path(k.name,prefix))
if name != "":
if not isinstance(name,unicode):
name = name.decode("utf8")
if not k.name.endswith(self._separator):
if wildcard is not None:
if callable(wildcard):
if not wildcard(basename(name)):
continue
else:
if not fnmatch(basename(name),wildcard):
continue
yield pathjoin(path,name)
def walkinfo(self,
path="/",
wildcard=None,
dir_wildcard=None,
search="breadth",
ignore_errors=False ):
if search != "breadth" or dir_wildcard is not None:
args = (wildcard,dir_wildcard,search,ignore_errors)
for item in super(S3FS,self).walkfiles(path,*args):
yield (item,self.getinfo(item))
else:
prefix = self._s3path(path)
for k in self._s3bukt.list(prefix=prefix):
name = relpath(self._uns3path(k.name,prefix))
if name != "":
if not isinstance(name,unicode):
name = name.decode("utf8")
if wildcard is not None:
if callable(wildcard):
if not wildcard(basename(name)):
continue
else:
if not fnmatch(basename(name),wildcard):
continue
yield (pathjoin(path,name),self._get_key_info(k,name))
def walkfilesinfo(self,
path="/",
wildcard=None,
dir_wildcard=None,
search="breadth",
ignore_errors=False ):
if search != "breadth" or dir_wildcard is not None:
args = (wildcard,dir_wildcard,search,ignore_errors)
for item in super(S3FS,self).walkfiles(path,*args):
yield (item,self.getinfo(item))
else:
prefix = self._s3path(path)
for k in self._s3bukt.list(prefix=prefix):
name = relpath(self._uns3path(k.name,prefix))
if name != "":
if not isinstance(name,unicode):
name = name.decode("utf8")
if not k.name.endswith(self._separator):
if wildcard is not None:
if callable(wildcard):
if not wildcard(basename(name)):
continue
else:
if not fnmatch(basename(name),wildcard):
continue
yield (pathjoin(path,name),self._get_key_info(k,name))
def _eq_utf8(name1,name2):
if isinstance(name1,unicode):
name1 = name1.encode("utf8")
if isinstance(name2,unicode):
name2 = name2.encode("utf8")
return name1 == name2
def _startswith_utf8(name1,name2):
if isinstance(name1,unicode):
name1 = name1.encode("utf8")
if isinstance(name2,unicode):
name2 = name2.encode("utf8")
return name1.startswith(name2)
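# A minimal usage sketch, assuming AWS credentials are set in the environment
# and that the calling account may create or access a bucket named
# "example-bucket" (both the bucket and prefix names here are illustrative):
#
#     from fs.s3fs import S3FS
#
#     s3 = S3FS("example-bucket", prefix="myapp/")
#     s3.makedir("logs", allow_recreate=True)
#     s3.setcontents("logs/hello.txt", "hello world")
#     print s3.listdir("logs")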
| bsd-3-clause | -3,332,692,433,183,976,000 | 1,805,751,729,444,181,000 | 38.133869 | 144 | 0.552355 | false |
matthewwardrop/formulaic | formulaic/materializers/transforms/scale.py | 1 | 1389 | import numpy
import scipy.sparse as spsparse
from formulaic.utils.stateful_transforms import stateful_transform
@stateful_transform
def scale(data, center=True, scale=True, ddof=1, _state=None):
data = numpy.array(data)
if 'ddof' not in _state:
_state['ddof'] = ddof
else:
ddof = _state['ddof']
# Handle centering
if 'center' not in _state:
if isinstance(center, bool) and center:
_state['center'] = numpy.mean(data, axis=0)
elif not isinstance(center, bool):
_state['center'] = numpy.array(center)
else:
_state['center'] = None
if _state['center'] is not None:
data = data - _state['center']
# Handle scaling
if 'scale' not in _state:
if isinstance(scale, bool) and scale:
_state['scale'] = numpy.sqrt(numpy.sum(data ** 2, axis=0) / (data.shape[0] - ddof))
elif not isinstance(scale, bool):
_state['scale'] = numpy.array(scale)
else:
_state['scale'] = None
if _state['scale'] is not None:
data = data / _state['scale']
return data
@scale.register(spsparse.spmatrix)
def _(data, *args, **kwargs):
assert data.shape[1] == 1
return scale(data.toarray()[:, 0], *args, **kwargs)
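# A minimal sketch of the stateful behaviour above, assuming a plain dict is
# passed for ``_state`` (formulaic's materializers normally manage this object):
# the first call computes and stores the centring/scaling statistics, and any
# later call reuses them unchanged. For [1, 2, 3, 4] the stored centre is 2.5
# and the stored scale is sqrt(5/3) ~= 1.291, giving roughly
# [-1.162, -0.387, 0.387, 1.162].
#
#     state = {}
#     scale(numpy.array([1.0, 2.0, 3.0, 4.0]), _state=state)
#     scale(numpy.array([5.0]), _state=state)  # reuses the stored statistics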
@stateful_transform
def center(data, _state=None):
return scale(data, scale=False, _state=_state)
| mit | 9,106,757,892,124,963,000 | 1,791,174,693,020,939,800 | 26.78 | 95 | 0.596112 | false |
erilyth/sugar | src/jarabe/view/pulsingicon.py | 1 | 7328 | # Copyright (C) 2008 One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import math
from gi.repository import GObject
from sugar3.graphics.icon import Icon
from sugar3.graphics import style
from sugar3.graphics.icon import CanvasIcon
_INTERVAL = 100
_STEP = math.pi / 10 # must be a fraction of pi, for clean caching
_MINIMAL_ALPHA_VALUE = 0.33
class Pulser(object):
def __init__(self, icon, interval=_INTERVAL):
self._pulse_hid = None
self._icon = icon
self._interval = interval
self._phase = 0
self._start_scale = 1.0
self._end_scale = 1.0
self._zoom_steps = 1
self._current_zoom_step = 1
self._current_scale_step = 1
def set_zooming(self, start_scale, end_scale, zoom_steps):
""" Set start and end scale and number of steps in zoom animation """
self._start_scale = start_scale
self._end_scale = end_scale
self._zoom_steps = zoom_steps
self._current_scale_step = abs(self._start_scale - self._end_scale) / \
self._zoom_steps
self._icon.scale = self._start_scale
def start(self, restart=False):
if restart:
self._phase = 0
if self._pulse_hid is None:
self._pulse_hid = GObject.timeout_add(self._interval,
self.__pulse_cb)
if self._start_scale != self._end_scale:
self._icon.scale = self._start_scale + \
self._current_scale_step * self._current_zoom_step
def stop(self):
if self._pulse_hid is not None:
GObject.source_remove(self._pulse_hid)
self._pulse_hid = None
self._icon.xo_color = self._icon.get_base_color()
self._phase = 0
self._icon.alpha = 1.0
def update(self):
self._icon.xo_color = self._icon.base_color
self._icon.alpha = _MINIMAL_ALPHA_VALUE + \
(1 - _MINIMAL_ALPHA_VALUE) * (math.cos(self._phase) + 1) / 2
def __pulse_cb(self):
self._phase += _STEP
if self._current_zoom_step <= self._zoom_steps and \
self._start_scale != self._end_scale:
self._icon.scale = self._start_scale + \
self._current_scale_step * self._current_zoom_step
self._current_zoom_step += 1
self.update()
return True
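# A brief sketch of driving a Pulser directly (the icon classes below do this
# internally); ``icon`` is assumed to be any object exposing the ``xo_color``,
# ``base_color``/``get_base_color()``, ``alpha`` and ``scale`` attributes used above:
#
#     pulser = Pulser(icon, interval=100)
#     pulser.set_zooming(start_scale=0.5, end_scale=1.0, zoom_steps=10)
#     pulser.start(restart=True)   # schedules the GObject timeout
#     ...
#     pulser.stop()                # restores the base colour and full alpha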
class PulsingIcon(Icon):
__gtype_name__ = 'SugarPulsingIcon'
def __init__(self, interval=_INTERVAL, **kwargs):
self._pulser = Pulser(self, interval)
self._base_color = None
self._pulse_color = None
self._paused = False
self._pulsing = False
Icon.__init__(self, **kwargs)
self._palette = None
self.connect('destroy', self.__destroy_cb)
def set_pulse_color(self, pulse_color):
self._pulse_color = pulse_color
self._pulser.update()
def get_pulse_color(self):
return self._pulse_color
pulse_color = GObject.property(
type=object, getter=get_pulse_color, setter=set_pulse_color)
def set_base_color(self, base_color):
self._base_color = base_color
self._pulser.update()
def get_base_color(self):
return self._base_color
def set_zooming(self, start_size=style.SMALL_ICON_SIZE,
end_size=style.XLARGE_ICON_SIZE,
zoom_steps=10):
if start_size > end_size:
start_scale = 1.0
end_scale = float(end_size) / start_size
else:
start_scale = float(start_size) / end_size
end_scale = 1.0
self._pulser.set_zooming(start_scale, end_scale, zoom_steps)
base_color = GObject.property(
type=object, getter=get_base_color, setter=set_base_color)
def set_paused(self, paused):
self._paused = paused
if self._paused:
self._pulser.stop()
else:
self._pulser.start(restart=False)
def get_paused(self):
return self._paused
paused = GObject.property(
type=bool, default=False, getter=get_paused, setter=set_paused)
def set_pulsing(self, pulsing):
self._pulsing = pulsing
if self._pulsing:
self._pulser.start(restart=True)
else:
self._pulser.stop()
def get_pulsing(self):
return self._pulsing
pulsing = GObject.property(
type=bool, default=False, getter=get_pulsing, setter=set_pulsing)
def _get_palette(self):
return self._palette
def _set_palette(self, palette):
if self._palette is not None:
self._palette.props.invoker = None
self._palette = palette
palette = property(_get_palette, _set_palette)
def __destroy_cb(self, icon):
self._pulser.stop()
if self._palette is not None:
self._palette.destroy()
class EventPulsingIcon(CanvasIcon):
__gtype_name__ = 'SugarEventPulsingIcon'
def __init__(self, interval=_INTERVAL, **kwargs):
self._pulser = Pulser(self, interval)
self._base_color = None
self._pulse_color = None
self._paused = False
self._pulsing = False
CanvasIcon.__init__(self, **kwargs)
self.connect('destroy', self.__destroy_cb)
def __destroy_cb(self, box):
self._pulser.stop()
def set_pulse_color(self, pulse_color):
self._pulse_color = pulse_color
self._pulser.update()
def get_pulse_color(self):
return self._pulse_color
pulse_color = GObject.property(
type=object, getter=get_pulse_color, setter=set_pulse_color)
def set_base_color(self, base_color):
self._base_color = base_color
self._pulser.update()
def get_base_color(self):
return self._base_color
base_color = GObject.property(
type=object, getter=get_base_color, setter=set_base_color)
def set_paused(self, paused):
self._paused = paused
if self._paused:
self._pulser.stop()
elif self._pulsing:
self._pulser.start(restart=False)
def get_paused(self):
return self._paused
paused = GObject.property(
type=bool, default=False, getter=get_paused, setter=set_paused)
def set_pulsing(self, pulsing):
self._pulsing = pulsing
if self._paused:
return
if self._pulsing:
self._pulser.start(restart=True)
else:
self._pulser.stop()
def get_pulsing(self):
return self._pulsing
pulsing = GObject.property(
type=bool, default=False, getter=get_pulsing, setter=set_pulsing)
| gpl-2.0 | -7,704,020,449,209,474,000 | -148,436,964,505,541,400 | 29.406639 | 79 | 0.602211 | false |
rwgdrummer/maskgen | maskgen/analytics/dctAnalytic.py | 1 | 17525 | # =============================================================================
# Authors: PAR Government
# Organization: DARPA
#
# Copyright (c) 2016 PAR Government
# All rights reserved.
#
#
# adapted from https://github.com/enmasse/jpeg_read
#==============================================================================
import sys
from math import *
from Tkinter import *
import matplotlib.pyplot as plt
import numpy as np
import logging
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
def memoize (function):
# http://programmingzen.com/2009/05/18/memoization-in-ruby-and-python/
cache = {}
def decorated_function (*args):
try:
return cache[args]
except KeyError:
val = function (*args)
cache[args] = val
return val
return decorated_function
@memoize
def decodeBits (len, val):
""" Calculate the value from the "additional" bits in the huffman data. """
return val if (val & (1 << len - 1)) else val - ((1 << len) - 1)
def extractCoeffs (data):
dclum = []
dcchr1 = []
dcchr2 = []
aclum = []
acchr1 = []
acchr2 = []
for MCU in data:
lum = MCU[0]
chr1 = MCU[1]
chr2 = MCU[2]
for MCU_component in lum:
if len (MCU_component):
dclum.append (MCU_component[0])
aclum.extend (MCU_component[1:])
for MCU_component in chr1:
if len (MCU_component):
dcchr1.append (MCU_component[0])
acchr1.extend (MCU_component[1:])
for MCU_component in chr2:
if len (MCU_component):
dcchr2.append (MCU_component[0])
acchr2.extend (MCU_component[1:])
return (dclum, dcchr1, dcchr2, aclum, acchr1, acchr2)
def generateHuffmanCodes (huffsize):
""" Calculate the huffman code of each length. """
huffcode = []
k = 0
code = 0
# Magic
for i in range (len (huffsize)):
si = huffsize[i]
for k in range (si):
huffcode.append ((i + 1, code))
code += 1
code <<= 1
return huffcode
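# Worked example: huffsize holds the number of codes of each length (1..16),
# so two 2-bit codes followed by one 3-bit code yield the canonical assignment
#
#     generateHuffmanCodes([0, 2, 1] + [0] * 13)
#     # -> [(2, 0), (2, 1), (3, 4)]   i.e. the bit patterns 00, 01 and 100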
def getBits (num, gen):
""" Get "num" bits from gen. """
out = 0
for i in range (num):
out <<= 1
val = gen.next ()
if val != []:
out += val & 0x01
else:
return []
return out
def mapHuffmanCodes (codes, values):
""" Map the huffman code to the right value. """
out = {}
for i in range (len (codes)):
out[codes[i]] = values[i]
return out
def readAPP (type, file):
""" Read APP marker. """
Lp = readWord (file)
Lp -= 2
# If APP0 try to read the JFIF header
# Not really necessary
if type == 0:
identifier = file.read (5)
Lp -= 5
version = file.read (2)
Lp -= 2
units = ord (file.read (1))
Lp -= 1
Xdensity = ord (file.read (1)) << 8
Xdensity |= ord (file.read (1))
Lp -= 2
Ydensity = ord (file.read (1)) << 8
Ydensity |= ord (file.read (1))
Lp -= 2
file.seek (Lp, 1)
def readByte (file):
""" Read a byte from file. """
return ord (file.read (1))
def readWord (file):
""" Read a 16 bit word from file. """
return ord (file.read (1)) << 8 | ord (file.read (1))
def restoreDC (data):
""" Restore the DC values. They are coded as the difference from the
previous DC value of the same component.
"""
out = []
dc_prev = [0 for x in range (len (data[0]))]
# For each MCU
for mcu in data:
# For each component
for comp_num in range (len (mcu)):
# For each DU
for du in range (len (mcu[comp_num])):
if mcu[comp_num][du]:
mcu[comp_num][du][0] += dc_prev[comp_num]
dc_prev[comp_num] = mcu[comp_num][du][0]
out.append (mcu)
return out
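# Illustration of the restoration above for a single component: DC differences
# of 2, 3 and -1 carried by successive data units accumulate to the absolute
# DC values 2, 5 and 4.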
class JPEG_Reader:
""" Class for reading DCT coefficients from JPEG files. """
def __init__ (self):
self.huffman_ac_tables = [{}, {}, {}, {}]
self.huffman_dc_tables = [{}, {}, {}, {}]
self.q_table = [[], [], [], []]
self.XYP = 0, 0, 0
self.component = {}
self.num_components = 0
self.mcus_read = 0
self.dc = []
self.inline_dc = 0
self.bit_stream = []
self.EOI = False
def readDCT_Coeffs (self, filename):
""" Reads and returns DCT coefficients from the supplied JPEG file. """
self.__init__ ()
data = []
with open (filename, "rb") as inputFile:
in_char = inputFile.read (1)
while in_char:
if in_char == chr (0xff):
in_char = inputFile.read (1)
in_num = ord (in_char)
if 0xe0 <= in_num <= 0xef:
readAPP (in_num - 0xe0, inputFile)
elif in_num == 0xdb:
self.__readDQT (inputFile)
elif in_num == 0xdc:
self.__readDNL (inputFile)
elif in_num == 0xc4:
self.__readDHT (inputFile)
elif in_num == 0xc8:
print "JPG"
elif 0xc0 <= in_num <= 0xcf:
self.__readSOF (in_num - 0xc0, inputFile)
elif in_num == 0xda:
self.__readSOS (inputFile)
self.bit_stream = self.__readBit (inputFile)
while not self.EOI:
data.append (self.__readMCU ())
in_char = inputFile.read (1)
return extractCoeffs (data if self.inline_dc else restoreDC (data))
def __readBit (self, file):
""" A generator that reads one bit from file and handles markers and
byte stuffing.
"""
input = file.read (1)
while input and not self.EOI:
if input == chr (0xFF):
cmd = file.read (1)
if cmd:
# Byte stuffing
if cmd == chr (0x00):
input = chr (0xFF)
# End of image marker
elif cmd == chr (0xD9):
self.EOI = True
# Restart markers
elif 0xD0 <= ord (cmd) <= 0xD7 and self.inline_dc:
# Reset dc value
self.dc = [0 for i in range (self.num_components + 1)]
input = file.read (1)
else:
input = file.read (1)
#print "CMD: %x" % ord(cmd)
if not self.EOI:
for i in range (7, -1, -1):
# Output next bit
yield (ord (input) >> i) & 0x01
input = file.read (1)
while True:
yield []
def __readDHT (self, file):
""" Read and compute the huffman tables. """
# Read the marker length
Lh = readWord (file)
Lh -= 2
while Lh > 0:
huffsize = []
huffval = []
T = readByte (file)
Th = T & 0x0F
Tc = (T >> 4) & 0x0F
#print "Lh: %d Th: %d Tc: %d" % (Lh, Th, Tc)
Lh -= 1
# Read how many symbols of each length
# up to 16 bits
for i in range (16):
huffsize.append (readByte (file))
Lh -= 1
# Generate the huffman codes
huffcode = generateHuffmanCodes (huffsize)
#print "Huffcode", huffcode
# Read the values that should be mapped to huffman codes
for i in huffcode:
#print i
try:
huffval.append (readByte (file))
Lh -= 1
except TypeError:
continue
# Generate lookup tables
if Tc == 0:
self.huffman_dc_tables[Th] = mapHuffmanCodes (huffcode, huffval)
else:
self.huffman_ac_tables[Th] = mapHuffmanCodes (huffcode, huffval)
def __readDNL (self, file):
""" Read the DNL marker. Changes the number of lines. """
Ld = readWord (file)
Ld -= 2
NL = readWord (file)
Ld -= 2
X, Y, P = self.XYP
if Y == 0:
self.XYP = X, NL, P
def __readDQT (self, file):
""" Read the quantization table. The table is in zigzag order. """
Lq = readWord (file)
Lq -= 2
while Lq > 0:
table = []
Tq = readByte (file)
Pq = Tq >> 4
Tq &= 0xF
Lq -= 1
if Pq == 0:
for i in range (64):
table.append (readByte (file))
Lq -= 1
else:
for i in range (64):
val = readWord (file)
table.append (val)
Lq -= 2
self.q_table[Tq] = table
def __readDU (self, comp_num):
""" Read one data unit with component index comp_num. """
data = []
comp = self.component[comp_num]
huff_tbl = self.huffman_dc_tables[comp['Td']]
# Fill data with 64 coefficients
while len (data) < 64:
key = 0
for bits in range (1, 17):
key_len = []
key <<= 1
# Get one bit from bit_stream
val = getBits (1, self.bit_stream)
if val == []:
break
key |= val
# If huffman code exists
if huff_tbl.has_key ((bits, key)):
key_len = huff_tbl[(bits, key)]
break
# After getting the DC value switch to the AC table
huff_tbl = self.huffman_ac_tables[comp['Ta']]
if key_len == []:
#print (bits, key, bin(key)), "key not found"
break
# If ZRL fill with 16 zero coefficients
elif key_len == 0xF0:
for i in range (16):
data.append (0)
continue
# If not DC coefficient
if len (data) != 0:
# If End of block
if key_len == 0x00:
# Fill the rest of the DU with zeros
while len (data) < 64:
data.append (0)
break
# The first part of the AC key_len is the number of leading
# zeros
for i in range (key_len >> 4):
if len (data) < 64:
data.append (0)
key_len &= 0x0F
if len (data) >= 64:
break
if key_len != 0:
# The rest of key_len is the number of "additional" bits
val = getBits (key_len, self.bit_stream)
if val == []:
break
# Decode the additional bits
num = decodeBits (key_len, val)
# Experimental, doesn't work right
if len (data) == 0 and self.inline_dc:
# The DC coefficient value is added to the DC value from
# the corresponding DU in the previous MCU
num += self.dc[comp_num]
self.dc[comp_num] = num
data.append (num)
else:
data.append (0)
#if len(data) != 64:
#print "Wrong size", len(data)
return data
def __readMCU (self):
""" Read an MCU. """
comp_num = mcu = range (self.num_components)
# For each component
for i in comp_num:
comp = self.component[i + 1]
mcu[i] = []
# For each DU
for j in range (comp['H'] * comp['V']):
if not self.EOI:
mcu[i].append (self.__readDU (i + 1))
self.mcus_read += 1
return mcu
def __readSOF (self, type, file):
""" Read the start of frame marker. """
Lf = readWord (file) # Read the marker length
Lf -= 2
P = readByte (file) # Read the sample precision
Lf -= 1
Y = readWord (file) # Read number of lines
Lf -= 2
X = readWord (file) # Read the number of samples per line
Lf -= 2
Nf = readByte (file) # Read number of components
Lf -= 1
self.XYP = X, Y, P
#print self.XYP
while Lf > 0:
C = readByte (file) # Read component identifier
V = readByte (file) # Read sampling factors
Tq = readByte (file)
Lf -= 3
H = V >> 4
V &= 0xF
# Assign horizontal & vertical sampling factors and qtable
self.component[C] = { 'H' : H, 'V' : V, 'Tq' : Tq }
def __readSOS (self, file):
""" Read the start of scan marker. """
Ls = readWord (file)
Ls -= 2
Ns = readByte (file) # Read number of components in scan
Ls -= 1
for i in range (Ns):
Cs = readByte (file) # Read the scan component selector
Ls -= 1
Ta = readByte (file) # Read the huffman table selectors
Ls -= 1
Td = Ta >> 4
Ta &= 0xF
# Assign the DC huffman table
self.component[Cs]['Td'] = Td
# Assign the AC huffman table
self.component[Cs]['Ta'] = Ta
Ss = readByte (file) # Should be zero if baseline DCT
Ls -= 1
Se = readByte (file) # Should be 63 if baseline DCT
Ls -= 1
A = readByte (file) # Should be zero if baseline DCT
Ls -= 1
#print "Ns:%d Ss:%d Se:%d A:%02X" % (Ns, Ss, Se, A)
self.num_components = Ns
self.dc = [0 for i in range (self.num_components + 1)]
def dequantize (self, mcu):
""" Dequantize an MCU. """
out = mcu
# For each coefficient in each DU in each component, multiply by the
# corresponding value in the quantization table.
for c in range (len (out)):
for du in range (len (out[c])):
for i in range (len (out[c][du])):
out[c][du][i] *= self.q_table[self.component[c + 1]['Tq']][i]
return out
def getHist(filename):
try:
import JPEG_MetaInfoPy
hist, lowValue = JPEG_MetaInfoPy.generateHistogram(filename)
return np.asarray(hist),np.asarray(range(lowValue,lowValue+len(hist)+1))
except Exception as ex:
logging.getLogger('maskgen').warn('External JPEG_MetaInfoPy failed: {}'.format(str(ex)))
DC = JPEG_Reader().readDCT_Coeffs(filename)[0]
minDC = min(DC)
maxDC = max(DC)
binCount = maxDC - minDC + 1
return np.histogram (DC, bins=binCount,
range=(minDC, maxDC + 1))
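# A minimal sketch of calling the helper above directly, assuming "photo.jpg"
# is a baseline JPEG on disk; both return values are numpy arrays:
#
#     counts, bins = getHist("photo.jpg")
#     print counts.sum(), "luminance DC coefficients binned"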
class JPEG_View:
def appliesTo (self, filename):
return filename.lower ().endswith (('jpg', 'jpeg'))
def draw (self, frame, filename):
fig = plt.figure ();
self._plotHistogram (fig, getHist(filename))
canvas = FigureCanvasTkAgg (fig, frame)
canvas.show ()
canvas.get_tk_widget ().pack (side=BOTTOM, fill=BOTH, expand=True)
def _labelSigma (self, figure, sigma):
""" Add a label of the value of sigma to the histogram plot. """
props = dict (boxstyle='round', facecolor='wheat', alpha=0.5)
figure.text (0.25, 0.85, '$\sigma=%.2f$' % (sigma),
fontsize=14, verticalalignment='top', bbox=props)
class DCTView (JPEG_View):
def screenName (self):
return 'JPG DCT Histogram'
def _plotHistogram (self, figure, histogram):
ordinates, abscissae = histogram
plt.bar (abscissae[:-1], ordinates, 1);
self._labelSigma (figure, ordinates.std ())
class FFT_DCTView (JPEG_View):
def screenName (self):
return 'FFT(JPG DCT Histogram)'
def _plotHistogram (self, figure, histogram):
# Calculate the DFT of the zero-meaned histogram values. The n/2+1
# positive frequencies are returned by rfft. Mirror the result back
# into ordinates.
#
mean = histogram[0].mean ()
posFreqs = abs (np.fft.rfft ([i - mean for i in histogram[0]]))
ordinates = list (reversed (posFreqs))
ordinates.extend (posFreqs[1:])
n = len (posFreqs)
abscissae = range (1 - n, n)
plt.plot (abscissae, ordinates, 'k')
plt.plot (abscissae, self.__hat (ordinates), 'r')
self._labelSigma (figure, np.std (ordinates))
def __hat (self, data):
length = len (data)
intercept1 = int (length * 0.425)
intercept2 = int (length * 0.575)
amp = max (data)
threshold = amp * 0.15
arr = np.full (length, threshold)
arr[intercept1:intercept2] = amp
return arr
if __name__ == "__main__":
DCTView ().draw (None, sys.argv[1])
FFT_DCTView ().draw (None, sys.argv[1]) | bsd-3-clause | 7,085,059,685,402,964,000 | -8,270,985,173,356,394,000 | 29.06175 | 96 | 0.473096 | false |
scikit-optimize/scikit-optimize | skopt/callbacks.py | 1 | 9377 | """Monitor and influence the optimization procedure via callbacks.
Callbacks are callables which are invoked after each iteration of the optimizer
and are passed the results "so far". Callbacks can monitor progress, or stop
the optimization early by returning `True`.
"""
try:
from collections.abc import Callable
except ImportError:
from collections import Callable
from time import time
import numpy as np
from skopt.utils import dump
def check_callback(callback):
"""
Check if callback is a callable or a list of callables.
"""
if callback is not None:
if isinstance(callback, Callable):
return [callback]
elif (isinstance(callback, list) and
all([isinstance(c, Callable) for c in callback])):
return callback
else:
raise ValueError("callback should be either a callable or "
"a list of callables.")
else:
return []
class VerboseCallback(object):
"""
Callback to control the verbosity.
Parameters
----------
n_init : int, optional
Number of points provided by the user which are yet to be
evaluated. This is equal to `len(x0)` when `y0` is None
n_random : int, optional
Number of points randomly chosen.
n_total : int
Total number of func calls.
Attributes
----------
iter_no : int
Number of iterations of the optimization routine.
"""
def __init__(self, n_total, n_init=0, n_random=0):
self.n_init = n_init
self.n_random = n_random
self.n_total = n_total
self.iter_no = 1
self._start_time = time()
self._print_info(start=True)
def _print_info(self, start=True):
iter_no = self.iter_no
if start:
status = "started"
eval_status = "Evaluating function"
search_status = "Searching for the next optimal point."
else:
status = "ended"
eval_status = "Evaluation done"
search_status = "Search finished for the next optimal point."
if iter_no <= self.n_init:
print("Iteration No: %d %s. %s at provided point."
% (iter_no, status, eval_status))
elif self.n_init < iter_no <= (self.n_random + self.n_init):
print("Iteration No: %d %s. %s at random point."
% (iter_no, status, eval_status))
else:
print("Iteration No: %d %s. %s"
% (iter_no, status, search_status))
def __call__(self, res):
"""
Parameters
----------
res : `OptimizeResult`, scipy object
The optimization as a OptimizeResult object.
"""
time_taken = time() - self._start_time
self._print_info(start=False)
curr_y = res.func_vals[-1]
curr_min = res.fun
print("Time taken: %0.4f" % time_taken)
print("Function value obtained: %0.4f" % curr_y)
print("Current minimum: %0.4f" % curr_min)
self.iter_no += 1
if self.iter_no <= self.n_total:
self._print_info(start=True)
self._start_time = time()
class TimerCallback(object):
"""
Log the elapsed time between each iteration of the minimization loop.
The time for each iteration is stored in the `iter_time` attribute which
you can inspect after the minimization has completed.
Attributes
----------
iter_time : list, shape (n_iter,)
`iter_time[i-1]` gives the time taken to complete iteration `i`
"""
def __init__(self):
self._time = time()
self.iter_time = []
def __call__(self, res):
"""
Parameters
----------
res : `OptimizeResult`, scipy object
The optimization as a OptimizeResult object.
"""
elapsed_time = time() - self._time
self.iter_time.append(elapsed_time)
self._time = time()
class EarlyStopper(object):
"""Decide to continue or not given the results so far.
The optimization procedure will be stopped if the callback returns True.
"""
def __call__(self, result):
"""
Parameters
----------
result : `OptimizeResult`, scipy object
The optimization as a OptimizeResult object.
"""
return self._criterion(result)
def _criterion(self, result):
"""Compute the decision to stop or not.
Classes inheriting from `EarlyStop` should use this method to
implement their decision logic.
Parameters
----------
result : `OptimizeResult`, scipy object
The optimization as a OptimizeResult object.
Returns
-------
decision : boolean or None
Return True/False if the criterion can make a decision or `None` if
there is not enough data yet to make a decision.
"""
raise NotImplementedError("The _criterion method should be implemented"
" by subclasses of EarlyStopper.")
class DeltaXStopper(EarlyStopper):
"""Stop the optimization when ``|x1 - x2| < delta``
If the last two positions at which the objective has been evaluated
are less than `delta` apart stop the optimization procedure.
"""
def __init__(self, delta):
super(EarlyStopper, self).__init__()
self.delta = delta
def _criterion(self, result):
if len(result.x_iters) >= 2:
return result.space.distance(result.x_iters[-2],
result.x_iters[-1]) < self.delta
else:
return None
class DeltaYStopper(EarlyStopper):
"""Stop the optimization if the `n_best` minima are within `delta`
Stop the optimizer if the absolute difference between the `n_best`
objective values is less than `delta`.
"""
def __init__(self, delta, n_best=5):
super(EarlyStopper, self).__init__()
self.delta = delta
self.n_best = n_best
def _criterion(self, result):
if len(result.func_vals) >= self.n_best:
func_vals = np.sort(result.func_vals)
worst = func_vals[self.n_best - 1]
best = func_vals[0]
# worst is always larger, so no need for abs()
return worst - best < self.delta
else:
return None
class HollowIterationsStopper(EarlyStopper):
"""
Stop if the improvement over the last n iterations is below a threshold.
"""
def __init__(self, n_iterations, threshold=0):
super(HollowIterationsStopper, self).__init__()
self.n_iterations = n_iterations
self.threshold = abs(threshold)
def _criterion(self, result):
if len(result.func_vals) <= self.n_iterations:
return False
cummin = np.minimum.accumulate(result.func_vals)
return cummin[-self.n_iterations - 1] - cummin[-1] <= self.threshold
class DeadlineStopper(EarlyStopper):
"""
Stop the optimization before running out of a fixed budget of time.
Attributes
----------
iter_time : list, shape (n_iter,)
`iter_time[i-1]` gives the time taken to complete iteration `i`
Parameters
----------
total_time : float
fixed budget of time (seconds) that the optimization must
finish within.
"""
def __init__(self, total_time):
super(DeadlineStopper, self).__init__()
self._time = time()
self.iter_time = []
self.total_time = total_time
def _criterion(self, result):
elapsed_time = time() - self._time
self.iter_time.append(elapsed_time)
self._time = time()
if result.x_iters:
time_remaining = self.total_time - np.sum(self.iter_time)
return time_remaining <= np.max(self.iter_time)
else:
return None
class ThresholdStopper(EarlyStopper):
"""
Stop the optimization when the objective value is lower
than the given threshold.
"""
def __init__(self, threshold: float) -> None:
super(EarlyStopper, self).__init__()
self.threshold = threshold
def _criterion(self, result) -> bool:
return np.any([val <= self.threshold for val in result.func_vals])
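# A minimal sketch of combining the stoppers above with an optimizer run,
# assuming scikit-optimize is installed and ``obj_fun`` is a user-supplied
# objective taking a single point:
#
#     from skopt import gp_minimize
#
#     result = gp_minimize(
#         obj_fun, [(-2.0, 2.0)], n_calls=30,
#         callback=[TimerCallback(),
#                   DeltaYStopper(delta=1e-3, n_best=5),
#                   DeadlineStopper(total_time=60)])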
class CheckpointSaver(object):
"""
Save current state after each iteration with :class:`skopt.dump`.
Examples
--------
>>> import skopt
>>> def obj_fun(x):
... return x[0]**2
>>> checkpoint_callback = skopt.callbacks.CheckpointSaver("./result.pkl")
>>> skopt.gp_minimize(obj_fun, [(-2, 2)], n_calls=10,
... callback=[checkpoint_callback]) # doctest: +SKIP
Parameters
----------
checkpoint_path : string
location where checkpoint will be saved to;
dump_options : string
options to pass on to `skopt.dump`, like `compress=9`
"""
def __init__(self, checkpoint_path, **dump_options):
self.checkpoint_path = checkpoint_path
self.dump_options = dump_options
def __call__(self, res):
"""
Parameters
----------
res : `OptimizeResult`, scipy object
The optimization as a OptimizeResult object.
"""
dump(res, self.checkpoint_path, **self.dump_options)
| bsd-3-clause | -4,365,682,753,648,825,000 | 8,542,724,475,783,501,000 | 28.394984 | 79 | 0.581743 | false |
bjackman/workload-automation | wlauto/resource_getters/standard.py | 1 | 23149 | # Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This module contains the standard set of resource getters used by Workload Automation.
"""
import os
import sys
import shutil
import inspect
import httplib
import logging
import json
import requests
from wlauto import ResourceGetter, GetterPriority, Parameter, NO_ONE, settings, __file__ as __base_filepath
from wlauto.exceptions import ResourceError
from wlauto.utils.android import ApkInfo
from wlauto.utils.misc import ensure_directory_exists as _d, ensure_file_directory_exists as _f, sha256, urljoin
from wlauto.utils.types import boolean
from wlauto.utils.revent import ReventRecording
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
class PackageFileGetter(ResourceGetter):
name = 'package_file'
description = """
Looks for exactly one file with the specified extension in the owner's directory. If a version
is specified on invocation of get, it will filter the discovered file based on that version.
Versions are treated as case-insensitive.
"""
extension = None
def register(self):
self.resolver.register(self, self.extension, GetterPriority.package)
def get(self, resource, **kwargs):
resource_dir = os.path.dirname(sys.modules[resource.owner.__module__].__file__)
version = kwargs.get('version')
return get_from_location_by_extension(resource, resource_dir, self.extension, version)
class EnvironmentFileGetter(ResourceGetter):
name = 'environment_file'
description = """
Looks for exactly one file with the specified extension in the owner's directory. If a version
is specified on invocation of get, it will filter the discovered file based on that version.
Versions are treated as case-insensitive.
"""
extension = None
def register(self):
self.resolver.register(self, self.extension, GetterPriority.environment)
def get(self, resource, **kwargs):
resource_dir = resource.owner.dependencies_directory
version = kwargs.get('version')
return get_from_location_by_extension(resource, resource_dir, self.extension, version)
class ReventGetter(ResourceGetter):
"""Implements logic for identifying revent files."""
def get_base_location(self, resource):
raise NotImplementedError()
def register(self):
self.resolver.register(self, 'revent', GetterPriority.package)
def get(self, resource, **kwargs):
# name format: [model/device_name.stage.revent]
device_model = resource.owner.device.get_device_model()
wa_device_name = resource.owner.device.name
for name in [device_model, wa_device_name]:
if not name:
continue
filename = '.'.join([name, resource.stage, 'revent']).lower()
self.logger.debug('Trying to get {0}.'.format(str(filename)))
location = _d(os.path.join(self.get_base_location(resource), 'revent_files'))
for candidate in os.listdir(location):
if candidate.lower() == filename.lower():
path = os.path.join(location, candidate)
try:
ReventRecording(path).close() # Check valid recording
return path
except ValueError as e:
self.logger.warning(e.message)
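# For example, a workload's "setup" recording for a device whose model string
# is "Nexus10" (an illustrative name) would be looked up, case-insensitively,
# as <base_location>/revent_files/nexus10.setup.revent.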
class PackageApkGetter(PackageFileGetter):
name = 'package_apk'
extension = 'apk'
description = """
Uses the same dependency resolution mechanism as ``PackageFileGetter``.
"""
def get(self, resource, **kwargs):
resource_dir = os.path.dirname(sys.modules[resource.owner.__module__].__file__)
version = kwargs.get('version')
variant = kwargs.get('variant_name')
return get_from_location_by_extension(resource, resource_dir, self.extension, version, variant=variant)
class PackageJarGetter(PackageFileGetter):
name = 'package_jar'
extension = 'jar'
class PackageReventGetter(ReventGetter):
name = 'package_revent'
def get_base_location(self, resource):
return get_owner_path(resource)
class EnvironmentApkGetter(EnvironmentFileGetter):
name = 'environment_apk'
extension = 'apk'
description = """
Uses the same dependency resolution mechanism as ``EnvironmentFileGetter``.
"""
def get(self, resource, **kwargs):
resource_dir = resource.owner.dependencies_directory
version = kwargs.get('version')
variant = kwargs.get('variant_name')
return get_from_location_by_extension(resource, resource_dir, self.extension, version, variant=variant)
class EnvironmentJarGetter(EnvironmentFileGetter):
name = 'environment_jar'
extension = 'jar'
class EnvironmentReventGetter(ReventGetter):
name = 'enviroment_revent'
def get_base_location(self, resource):
return resource.owner.dependencies_directory
class ExecutableGetter(ResourceGetter):
name = 'exe_getter'
resource_type = 'executable'
priority = GetterPriority.environment
def get(self, resource, **kwargs):
if settings.binaries_repository:
path = os.path.join(settings.binaries_repository, resource.platform, resource.filename)
if os.path.isfile(path):
return path
class PackageExecutableGetter(ExecutableGetter):
name = 'package_exe_getter'
priority = GetterPriority.package
def get(self, resource, **kwargs):
path = os.path.join(get_owner_path(resource), 'bin', resource.platform, resource.filename)
if os.path.isfile(path):
return path
class EnvironmentExecutableGetter(ExecutableGetter):
name = 'env_exe_getter'
def get(self, resource, **kwargs):
paths = [
os.path.join(resource.owner.dependencies_directory, 'bin',
resource.platform, resource.filename),
os.path.join(settings.environment_root, 'bin',
resource.platform, resource.filename),
]
for path in paths:
if os.path.isfile(path):
return path
class DependencyFileGetter(ResourceGetter):
name = 'filer'
description = """
    Gets resources from the specified mount point. Copies them to the local dependencies
directory, and returns the path to the local copy.
"""
resource_type = 'file'
relative_path = '' # May be overridden by subclasses.
priority = GetterPriority.remote
parameters = [
Parameter('mount_point', default='/', global_alias='remote_assets_path',
description='Local mount point for the remote filer.'),
]
def __init__(self, resolver, **kwargs):
super(DependencyFileGetter, self).__init__(resolver, **kwargs)
def get(self, resource, **kwargs):
force = kwargs.get('force')
remote_path = os.path.join(self.mount_point, self.relative_path, resource.path)
local_path = _f(os.path.join(settings.dependencies_directory, '__remote',
resource.owner.name, os.path.basename(resource.path)))
if not os.path.exists(local_path) or force:
if not os.path.exists(remote_path):
return None
self.logger.debug('Copying {} to {}'.format(remote_path, local_path))
shutil.copy(remote_path, local_path)
return local_path
class PackageCommonDependencyGetter(ResourceGetter):
name = 'packaged_common_dependency'
resource_type = 'file'
priority = GetterPriority.package - 1 # check after owner-specific locations
def get(self, resource, **kwargs):
path = os.path.join(settings.package_directory, 'common', resource.path)
if os.path.exists(path):
return path
class EnvironmentCommonDependencyGetter(ResourceGetter):
name = 'environment_common_dependency'
resource_type = 'file'
priority = GetterPriority.environment - 1 # check after owner-specific locations
def get(self, resource, **kwargs):
path = os.path.join(settings.dependencies_directory,
os.path.basename(resource.path))
if os.path.exists(path):
return path
class PackageDependencyGetter(ResourceGetter):
name = 'packaged_dependency'
resource_type = 'file'
priority = GetterPriority.package
def get(self, resource, **kwargs):
owner_path = inspect.getfile(resource.owner.__class__)
path = os.path.join(os.path.dirname(owner_path), resource.path)
if os.path.exists(path):
return path
class EnvironmentDependencyGetter(ResourceGetter):
name = 'environment_dependency'
resource_type = 'file'
priority = GetterPriority.environment
def get(self, resource, **kwargs):
path = os.path.join(resource.owner.dependencies_directory, os.path.basename(resource.path))
if os.path.exists(path):
return path
class ExtensionAssetGetter(DependencyFileGetter):
name = 'extension_asset'
resource_type = 'extension_asset'
class HttpGetter(ResourceGetter):
name = 'http_assets'
description = """
Downloads resources from a server based on an index fetched from the specified URL.
Given a URL, this will try to fetch ``<URL>/index.json``. The index file maps extension
    names to a list of corresponding asset descriptions. Each asset description contains a path
(relative to the base URL) of the resource and a SHA256 hash, so that this Getter can
verify whether the resource on the remote has changed.
For example, let's assume we want to get the APK file for workload "foo", and that
assets are hosted at ``http://example.com/assets``. This Getter will first try to
    download ``http://example.com/assets/index.json``. The index file may contain
something like ::
{
"foo": [
{
"path": "foo-app.apk",
"sha256": "b14530bb47e04ed655ac5e80e69beaa61c2020450e18638f54384332dffebe86"
},
{
"path": "subdir/some-other-asset.file",
"sha256": "48d9050e9802246d820625717b72f1c2ba431904b8484ca39befd68d1dbedfff"
}
]
}
    This Getter will look through the list of assets for "foo" (in this case, two) and check
the paths until it finds one matching the resource (in this case, "foo-app.apk").
    Finally, it will try to download that file relative to the base URL and extension name
(in this case, "http://example.com/assets/foo/foo-app.apk"). The downloaded version
will be cached locally, so that in the future, the getter will check the SHA256 hash
of the local file against the one advertised inside index.json, and provided that hasn't
changed, it won't try to download the file again.
"""
priority = GetterPriority.remote
resource_type = ['apk', 'file', 'jar', 'revent', 'executable']
parameters = [
Parameter('url', global_alias='remote_assets_url',
description="""URL of the index file for assets on an HTTP server."""),
Parameter('username',
description="""User name for authenticating with assets URL"""),
Parameter('password',
description="""Password for authenticationg with assets URL"""),
Parameter('always_fetch', kind=boolean, default=False, global_alias='always_fetch_remote_assets',
description="""If ``True``, will always attempt to fetch assets from the remote, even if
a local cached copy is available."""),
Parameter('chunk_size', kind=int, default=1024,
description="""Chunk size for streaming large assets."""),
]
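    # Example (sketch): the global aliases declared above can be set in the WA
    # config to point this getter at an asset server; the URL and values below
    # are purely illustrative.
    #
    #   remote_assets_url = 'http://example.com/assets'
    #   always_fetch_remote_assets = False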
def __init__(self, resolver, **kwargs):
super(HttpGetter, self).__init__(resolver, **kwargs)
self.index = None
def get(self, resource, **kwargs):
if not resource.owner:
return # TODO: add support for unowned resources
if not self.index:
self.index = self.fetch_index()
asset = self.resolve_resource(resource)
if not asset:
return
return self.download_asset(asset, resource.owner.name)
def fetch_index(self):
if not self.url:
return {}
index_url = urljoin(self.url, 'index.json')
response = self.geturl(index_url)
if response.status_code != httplib.OK:
            message = 'Could not fetch "{}"; received "{} {}"'
self.logger.error(message.format(index_url, response.status_code, response.reason))
return {}
return json.loads(response.content)
def download_asset(self, asset, owner_name):
url = urljoin(self.url, owner_name, asset['path'])
local_path = _f(os.path.join(settings.dependencies_directory, '__remote',
owner_name, asset['path'].replace('/', os.sep)))
if os.path.exists(local_path) and not self.always_fetch:
local_sha = sha256(local_path)
if local_sha == asset['sha256']:
self.logger.debug('Local SHA256 matches; not re-downloading')
return local_path
self.logger.debug('Downloading {}'.format(url))
response = self.geturl(url, stream=True)
if response.status_code != httplib.OK:
            message = 'Could not download asset "{}"; received "{} {}"'
self.logger.warning(message.format(url, response.status_code, response.reason))
return
with open(local_path, 'wb') as wfh:
for chunk in response.iter_content(chunk_size=self.chunk_size):
wfh.write(chunk)
return local_path
def geturl(self, url, stream=False):
if self.username:
auth = (self.username, self.password)
else:
auth = None
return requests.get(url, auth=auth, stream=stream)
def resolve_resource(self, resource):
# pylint: disable=too-many-branches,too-many-locals
assets = self.index.get(resource.owner.name, {})
if not assets:
return {}
if resource.name in ['apk', 'jar']:
paths = [a['path'] for a in assets]
version = getattr(resource, 'version', None)
found = get_from_list_by_extension(resource, paths, resource.name, version)
if found:
for a in assets:
if a['path'] == found:
return a
elif resource.name == 'revent':
device_model = resource.owner.device.get_device_model()
wa_device_name = resource.owner.device.name
for name in [device_model, wa_device_name]:
if not name:
continue
filename = '.'.join([name, resource.stage, 'revent']).lower()
for asset in assets:
pathname = os.path.basename(asset['path']).lower()
if pathname == filename:
try:
ReventRecording(asset['path']).close() # Check valid recording
return asset
except ValueError as e:
self.logger.warning(e.message)
elif resource.name == 'executable':
platform = resource.platform
path = '/'.join(['bin', platform, resource.filename])
for asset in assets:
if asset['path'].lower() == path.lower():
return asset
else: # file
for asset in assets:
if asset['path'].lower() == resource.path.lower():
return asset
class RemoteFilerGetter(ResourceGetter):
name = 'filer_assets'
description = """
Finds resources on a (locally mounted) remote filer and caches them locally.
This assumes that the filer is mounted on the local machine (e.g. as a samba share).
"""
priority = GetterPriority.remote
resource_type = ['apk', 'file', 'jar', 'revent']
parameters = [
Parameter('remote_path', global_alias='remote_assets_path', default='',
description="""Path, on the local system, where the assets are located."""),
Parameter('always_fetch', kind=boolean, default=False, global_alias='always_fetch_remote_assets',
description="""If ``True``, will always attempt to fetch assets from the remote, even if
a local cached copy is available."""),
]
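    # Example (sketch): with the filer mounted locally, only the global alias
    # above needs to be set; the mount point shown is an assumption.
    #
    #   remote_assets_path = '/mnt/filer/wa-assets'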
def get(self, resource, **kwargs):
version = kwargs.get('version')
if resource.owner:
remote_path = os.path.join(self.remote_path, resource.owner.name)
local_path = os.path.join(settings.environment_root, '__filer', resource.owner.dependencies_directory)
message = 'resource={}, version={}, remote_path={}, local_path={}'
self.logger.debug(message.format(resource, version, remote_path, local_path))
return self.try_get_resource(resource, version, remote_path, local_path)
else:
result = None
            for entry in os.listdir(self.remote_path):
remote_path = os.path.join(self.remote_path, entry)
local_path = os.path.join(settings.environment_root, '__filer', settings.dependencies_directory, entry)
result = self.try_get_resource(resource, version, remote_path, local_path)
if result:
break
return result
def try_get_resource(self, resource, version, remote_path, local_path):
if not self.always_fetch:
result = self.get_from(resource, version, local_path)
if result:
return result
if remote_path:
            # Didn't find it cached locally; now check the remote
result = self.get_from(resource, version, remote_path)
if not result:
return result
else: # remote path is not set
return None
# Found it remotely, cache locally, then return it
local_full_path = os.path.join(_d(local_path), os.path.basename(result))
self.logger.debug('cp {} {}'.format(result, local_full_path))
shutil.copy(result, local_full_path)
return local_full_path
def get_from(self, resource, version, location): # pylint: disable=no-self-use
# pylint: disable=too-many-branches
if resource.name in ['apk', 'jar']:
return get_from_location_by_extension(resource, location, resource.name, version)
elif resource.name == 'file':
filepath = os.path.join(location, resource.path)
if os.path.exists(filepath):
return filepath
elif resource.name == 'revent':
device_model = resource.owner.device.get_device_model()
wa_device_name = resource.owner.device.name
for name in [device_model, wa_device_name]:
if not name:
continue
filename = '.'.join([name, resource.stage, 'revent']).lower()
alternate_location = os.path.join(location, 'revent_files')
# There tends to be some confusion as to where revent files should
# be placed. This looks both in the extension's directory, and in
# 'revent_files' subdirectory under it, if it exists.
path = None
if os.path.isdir(alternate_location):
for candidate in os.listdir(alternate_location):
if candidate.lower() == filename.lower():
path = os.path.join(alternate_location, candidate)
if os.path.isdir(location):
for candidate in os.listdir(location):
if candidate.lower() == filename.lower():
path = os.path.join(location, candidate)
if path:
try:
ReventRecording(path).close() # Check valid recording
return path
except ValueError as e:
self.logger.warning(e.message)
else:
raise ValueError('Unexpected resource type: {}'.format(resource.name))
# Utility functions
def get_from_location_by_extension(resource, location, extension, version=None, variant=None):
try:
found_files = [os.path.join(location, f) for f in os.listdir(location)]
except OSError:
return None
try:
return get_from_list_by_extension(resource, found_files, extension, version, variant=variant)
except ResourceError:
raise ResourceError('More than one .{} found in {} for {}.'.format(extension,
location,
resource.owner.name))
def get_from_list_by_extension(resource, filelist, extension, version=None, variant=None):
filelist = [ff for ff in filelist if os.path.splitext(ff)[1].lower().endswith('.' + extension)]
if variant:
filelist = [ff for ff in filelist if variant.lower() in os.path.basename(ff).lower()]
if version:
if extension == 'apk':
filelist = [ff for ff in filelist if version.lower() in ApkInfo(ff).version_name.lower()]
else:
filelist = [ff for ff in filelist if version.lower() in os.path.basename(ff).lower()]
if extension == 'apk':
filelist = [ff for ff in filelist if not ApkInfo(ff).native_code or resource.platform in ApkInfo(ff).native_code]
filelist = [ff for ff in filelist if resource.uiauto == ('com.arm.wlauto.uiauto' in ApkInfo(ff).package)]
if len(filelist) == 1:
return filelist[0]
elif not filelist:
return None
else:
raise ResourceError('More than one .{} found in {} for {}.'.format(extension,
filelist,
resource.owner.name))
def get_owner_path(resource):
if resource.owner is NO_ONE:
return os.path.join(os.path.dirname(__base_filepath), 'common')
else:
return os.path.dirname(sys.modules[resource.owner.__module__].__file__)
| apache-2.0 | 4,664,021,892,576,925,000 | -74,977,237,569,896,620 | 38.70669 | 121 | 0.615016 | false |
thundernet8/WRGameVideos-Server | venv/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py | 475 | 9162 | """
lockfile.py - Platform-independent advisory file locks.
Requires Python 2.5 unless you apply 2.4.diff
Locking is done on a per-thread basis instead of a per-process basis.
Usage:
>>> lock = LockFile('somefile')
>>> try:
... lock.acquire()
... except AlreadyLocked:
... print 'somefile', 'is locked already.'
... except LockFailed:
... print 'somefile', 'can\\'t be locked.'
... else:
... print 'got lock'
got lock
>>> print lock.is_locked()
True
>>> lock.release()
>>> lock = LockFile('somefile')
>>> print lock.is_locked()
False
>>> with lock:
... print lock.is_locked()
True
>>> print lock.is_locked()
False
>>> lock = LockFile('somefile')
>>> # It is okay to lock twice from the same thread...
>>> with lock:
... lock.acquire()
...
>>> # Though no counter is kept, so you can't unlock multiple times...
>>> print lock.is_locked()
False
Exceptions:
Error - base class for other exceptions
LockError - base class for all locking exceptions
AlreadyLocked - Another thread or process already holds the lock
LockFailed - Lock failed for some other reason
UnlockError - base class for all unlocking exceptions
AlreadyUnlocked - File was not locked.
NotMyLock - File was locked but not by the current thread/process
"""
from __future__ import absolute_import
import sys
import socket
import os
import threading
import time
import urllib
import warnings
import functools
# Work with PEP8 and non-PEP8 versions of threading module.
if not hasattr(threading, "current_thread"):
threading.current_thread = threading.currentThread
if not hasattr(threading.Thread, "get_name"):
threading.Thread.get_name = threading.Thread.getName
__all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked',
'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock',
'LinkLockFile', 'MkdirLockFile', 'SQLiteLockFile',
'LockBase', 'locked']
class Error(Exception):
"""
Base class for other exceptions.
>>> try:
... raise Error
... except Exception:
... pass
"""
pass
class LockError(Error):
"""
Base class for error arising from attempts to acquire the lock.
>>> try:
... raise LockError
... except Error:
... pass
"""
pass
class LockTimeout(LockError):
"""Raised when lock creation fails within a user-defined period of time.
>>> try:
... raise LockTimeout
... except LockError:
... pass
"""
pass
class AlreadyLocked(LockError):
"""Some other thread/process is locking the file.
>>> try:
... raise AlreadyLocked
... except LockError:
... pass
"""
pass
class LockFailed(LockError):
"""Lock file creation failed for some other reason.
>>> try:
... raise LockFailed
... except LockError:
... pass
"""
pass
class UnlockError(Error):
"""
Base class for errors arising from attempts to release the lock.
>>> try:
... raise UnlockError
... except Error:
... pass
"""
pass
class NotLocked(UnlockError):
"""Raised when an attempt is made to unlock an unlocked file.
>>> try:
... raise NotLocked
... except UnlockError:
... pass
"""
pass
class NotMyLock(UnlockError):
"""Raised when an attempt is made to unlock a file someone else locked.
>>> try:
... raise NotMyLock
... except UnlockError:
... pass
"""
pass
class LockBase:
"""Base class for platform-specific lock classes."""
def __init__(self, path, threaded=True, timeout=None):
"""
>>> lock = LockBase('somefile')
>>> lock = LockBase('somefile', threaded=False)
"""
self.path = path
self.lock_file = os.path.abspath(path) + ".lock"
self.hostname = socket.gethostname()
self.pid = os.getpid()
if threaded:
t = threading.current_thread()
# Thread objects in Python 2.4 and earlier do not have ident
            # attrs. Work around that.
ident = getattr(t, "ident", hash(t))
self.tname = "-%x" % (ident & 0xffffffff)
else:
self.tname = ""
dirname = os.path.dirname(self.lock_file)
# unique name is mostly about the current process, but must
# also contain the path -- otherwise, two adjacent locked
# files conflict (one file gets locked, creating lock-file and
# unique file, the other one gets locked, creating lock-file
# and overwriting the already existing lock-file, then one
# gets unlocked, deleting both lock-file and unique file,
# finally the last lock errors out upon releasing.
self.unique_name = os.path.join(dirname,
"%s%s.%s%s" % (self.hostname,
self.tname,
self.pid,
hash(self.path)))
self.timeout = timeout
def acquire(self, timeout=None):
"""
Acquire the lock.
* If timeout is omitted (or None), wait forever trying to lock the
file.
* If timeout > 0, try to acquire the lock for that many seconds. If
the lock period expires and the file is still locked, raise
LockTimeout.
* If timeout <= 0, raise AlreadyLocked immediately if the file is
already locked.
"""
        raise NotImplementedError("implement in subclass")
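    # A minimal sketch of the three timeout modes documented above:
    #
    #   lock = LockFile('somefile')
    #   lock.acquire()            # block until the lock can be taken
    #   lock.acquire(timeout=10)  # raise LockTimeout after ~10 seconds
    #   lock.acquire(timeout=0)   # raise AlreadyLocked if someone holds it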
def release(self):
"""
Release the lock.
If the file is not locked, raise NotLocked.
"""
        raise NotImplementedError("implement in subclass")
def is_locked(self):
"""
Tell whether or not the file is locked.
"""
        raise NotImplementedError("implement in subclass")
def i_am_locking(self):
"""
Return True if this object is locking the file.
"""
        raise NotImplementedError("implement in subclass")
def break_lock(self):
"""
Remove a lock. Useful if a locking thread failed to unlock.
"""
        raise NotImplementedError("implement in subclass")
def __enter__(self):
"""
Context manager support.
"""
self.acquire()
return self
def __exit__(self, *_exc):
"""
Context manager support.
"""
self.release()
def __repr__(self):
return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name,
self.path)
def _fl_helper(cls, mod, *args, **kwds):
warnings.warn("Import from %s module instead of lockfile package" % mod,
DeprecationWarning, stacklevel=2)
# This is a bit funky, but it's only for awhile. The way the unit tests
# are constructed this function winds up as an unbound method, so it
# actually takes three args, not two. We want to toss out self.
if not isinstance(args[0], str):
# We are testing, avoid the first arg
args = args[1:]
if len(args) == 1 and not kwds:
kwds["threaded"] = True
return cls(*args, **kwds)
def LinkFileLock(*args, **kwds):
"""Factory function provided for backwards compatibility.
Do not use in new code. Instead, import LinkLockFile from the
lockfile.linklockfile module.
"""
from . import linklockfile
return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile",
*args, **kwds)
def MkdirFileLock(*args, **kwds):
"""Factory function provided for backwards compatibility.
Do not use in new code. Instead, import MkdirLockFile from the
lockfile.mkdirlockfile module.
"""
from . import mkdirlockfile
return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile",
*args, **kwds)
def SQLiteFileLock(*args, **kwds):
"""Factory function provided for backwards compatibility.
Do not use in new code. Instead, import SQLiteLockFile from the
lockfile.mkdirlockfile module.
"""
from . import sqlitelockfile
return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile",
*args, **kwds)
def locked(path, timeout=None):
"""Decorator which enables locks for decorated function.
Arguments:
- path: path for lockfile.
- timeout (optional): Timeout for acquiring lock.
Usage:
@locked('/var/run/myname', timeout=0)
def myname(...):
...
"""
def decor(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
lock = FileLock(path, timeout=timeout)
lock.acquire()
try:
return func(*args, **kwargs)
finally:
lock.release()
return wrapper
return decor
if hasattr(os, "link"):
from . import linklockfile as _llf
LockFile = _llf.LinkLockFile
else:
from . import mkdirlockfile as _mlf
LockFile = _mlf.MkdirLockFile
FileLock = LockFile
| gpl-2.0 | 8,405,935,521,867,256,000 | 6,009,576,896,836,694,000 | 27.104294 | 79 | 0.59332 | false |
peterbraden/tensorflow | tensorflow/python/kernel_tests/sparse_matmul_op_test.py | 9 | 3990 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.tf.matmul."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
def RandMatrix(rows, cols, tr):
if tr:
rows, cols = cols, rows
return (np.clip(np.random.uniform(low=-100.0, high=100.0, size=rows * cols),
0, 100) / 100).reshape([rows, cols]).astype(np.float32)
class SparseMatMulTest(tf.test.TestCase):
def _testCpuMatmul(self, x, y, tr_a=False, tr_b=False,
sp_a=True, sp_b=False):
x_mat = np.matrix(x)
if tr_a:
x_mat = np.transpose(x_mat)
y_mat = np.matrix(y)
if tr_b:
y_mat = np.transpose(y_mat)
np_ans = x_mat * y_mat
with self.test_session(use_gpu=False):
tf_ans = tf.matmul(x, y,
transpose_a=tr_a, transpose_b=tr_b,
a_is_sparse=sp_a,
b_is_sparse=sp_b)
out = tf_ans.eval()
self.assertAllClose(np_ans, out, rtol=1e-4, atol=1e-4)
self.assertShapeEqual(np_ans, tf_ans)
def testFloatBasic(self):
x = np.arange(0., 4.).reshape([4, 1]).astype(np.float32)
y = np.arange(-1., 1.).reshape([1, 2]).astype(np.float32)
self._testCpuMatmul(x, y)
# Tests setting one dimension to be a high value.
def testFloatLarge(self):
r1 = np.random.randint(6000, 20000)
r2 = np.random.randint(1, 10)
r3 = np.random.randint(1, 10)
for m, k, n in [(r1, r2, r3),
(r2, r1, r3),
(r2, r3, r1)]:
x = RandMatrix(m, k, False)
y = RandMatrix(k, n, False)
self._testCpuMatmul(x, y)
self._testCpuMatmul(x, y, sp_a=False, sp_b=True)
# Tests random sized matrices.
def testFloatRandom(self):
for _ in range(10):
for tr_a in [True, False]:
for tr_b in [True, False]:
for sp_a in [True, False]:
for sp_b in [True, False]:
n, k, m = np.random.randint(1, 100, size=3)
x = RandMatrix(n, k, tr_a)
y = RandMatrix(k, m, tr_b)
self._testCpuMatmul(x, y, tr_a, tr_b, sp_a, sp_b)
class MatMulGradientTest(tf.test.TestCase):
def _testGradients(self, tr_a, tr_b, sp_a, sp_b, name):
with self.test_session():
a = tf.constant(RandMatrix(3, 2, tr_a), dtype=tf.float32)
b = tf.constant(RandMatrix(2, 4, tr_b), dtype=tf.float32)
m = tf.matmul(a, b,
name=name,
transpose_a=tr_a,
transpose_b=tr_b,
a_is_sparse=sp_a,
b_is_sparse=sp_b)
err = (tf.test.compute_gradient_error(a, [2, 3]
if tr_a else [3, 2], m, [3, 4]) +
tf.test.compute_gradient_error(b, [4, 2]
if tr_b else [2, 4], m, [3, 4]))
print("sparse_matmul gradient err = ", err)
self.assertLess(err, 1e-3)
def testGradientInput(self):
for tr_a in [True, False]:
for tr_b in [True, False]:
for sp_a in [True, False]:
for sp_b in [True, False]:
name = "sparse_matmul_%s_%s_%s_%s" % (tr_a, tr_b, sp_a, sp_b)
self._testGradients(tr_a, tr_b, sp_a, sp_b, name)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | 1,122,443,561,154,331,400 | 1,522,752,873,923,512,600 | 34.945946 | 80 | 0.554386 | false |
pythonalliance/uno2bot | errors.py | 1 | 1077 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Telegram bot to play UNO in group chats
# Copyright (c) 2016 Jannes Höke <uno@jhoeke.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class NoGameInChatError(Exception):
pass
class AlreadyJoinedError(Exception):
pass
class LobbyClosedError(Exception):
pass
class NotEnoughPlayersError(Exception):
pass
class DeckEmptyError(Exception):
pass
class PlayerLeftError(Exception):
pass
| agpl-3.0 | -1,501,440,139,413,533,700 | 2,313,102,109,161,027,000 | 24.619048 | 74 | 0.749071 | false |
partofthething/home-assistant | homeassistant/components/zha/core/const.py | 1 | 10279 | """All constants related to the ZHA component."""
import enum
import logging
from typing import List
import bellows.zigbee.application
from zigpy.config import CONF_DEVICE_PATH # noqa: F401 # pylint: disable=unused-import
import zigpy_cc.zigbee.application
import zigpy_deconz.zigbee.application
import zigpy_xbee.zigbee.application
import zigpy_zigate.zigbee.application
import zigpy_znp.zigbee.application
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR
from homeassistant.components.climate import DOMAIN as CLIMATE
from homeassistant.components.cover import DOMAIN as COVER
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER
from homeassistant.components.fan import DOMAIN as FAN
from homeassistant.components.light import DOMAIN as LIGHT
from homeassistant.components.lock import DOMAIN as LOCK
from homeassistant.components.number import DOMAIN as NUMBER
from homeassistant.components.sensor import DOMAIN as SENSOR
from homeassistant.components.switch import DOMAIN as SWITCH
from .typing import CALLABLE_T
ATTR_ARGS = "args"
ATTR_ATTRIBUTE = "attribute"
ATTR_ATTRIBUTE_ID = "attribute_id"
ATTR_ATTRIBUTE_NAME = "attribute_name"
ATTR_AVAILABLE = "available"
ATTR_CLUSTER_ID = "cluster_id"
ATTR_CLUSTER_TYPE = "cluster_type"
ATTR_COMMAND_TYPE = "command_type"
ATTR_DEVICE_IEEE = "device_ieee"
ATTR_DEVICE_TYPE = "device_type"
ATTR_ENDPOINTS = "endpoints"
ATTR_ENDPOINT_NAMES = "endpoint_names"
ATTR_ENDPOINT_ID = "endpoint_id"
ATTR_IEEE = "ieee"
ATTR_IN_CLUSTERS = "in_clusters"
ATTR_LAST_SEEN = "last_seen"
ATTR_LEVEL = "level"
ATTR_LQI = "lqi"
ATTR_MANUFACTURER = "manufacturer"
ATTR_MANUFACTURER_CODE = "manufacturer_code"
ATTR_MEMBERS = "members"
ATTR_MODEL = "model"
ATTR_NEIGHBORS = "neighbors"
ATTR_NODE_DESCRIPTOR = "node_descriptor"
ATTR_NWK = "nwk"
ATTR_OUT_CLUSTERS = "out_clusters"
ATTR_POWER_SOURCE = "power_source"
ATTR_PROFILE_ID = "profile_id"
ATTR_QUIRK_APPLIED = "quirk_applied"
ATTR_QUIRK_CLASS = "quirk_class"
ATTR_RSSI = "rssi"
ATTR_SIGNATURE = "signature"
ATTR_TYPE = "type"
ATTR_UNIQUE_ID = "unique_id"
ATTR_VALUE = "value"
ATTR_WARNING_DEVICE_DURATION = "duration"
ATTR_WARNING_DEVICE_MODE = "mode"
ATTR_WARNING_DEVICE_STROBE = "strobe"
ATTR_WARNING_DEVICE_STROBE_DUTY_CYCLE = "duty_cycle"
ATTR_WARNING_DEVICE_STROBE_INTENSITY = "intensity"
BAUD_RATES = [2400, 4800, 9600, 14400, 19200, 38400, 57600, 115200, 128000, 256000]
BINDINGS = "bindings"
CHANNEL_ACCELEROMETER = "accelerometer"
CHANNEL_ANALOG_INPUT = "analog_input"
CHANNEL_ANALOG_OUTPUT = "analog_output"
CHANNEL_ATTRIBUTE = "attribute"
CHANNEL_BASIC = "basic"
CHANNEL_COLOR = "light_color"
CHANNEL_COVER = "window_covering"
CHANNEL_DOORLOCK = "door_lock"
CHANNEL_ELECTRICAL_MEASUREMENT = "electrical_measurement"
CHANNEL_EVENT_RELAY = "event_relay"
CHANNEL_FAN = "fan"
CHANNEL_HUMIDITY = "humidity"
CHANNEL_IAS_WD = "ias_wd"
CHANNEL_IDENTIFY = "identify"
CHANNEL_ILLUMINANCE = "illuminance"
CHANNEL_LEVEL = ATTR_LEVEL
CHANNEL_MULTISTATE_INPUT = "multistate_input"
CHANNEL_OCCUPANCY = "occupancy"
CHANNEL_ON_OFF = "on_off"
CHANNEL_POWER_CONFIGURATION = "power"
CHANNEL_PRESSURE = "pressure"
CHANNEL_SHADE = "shade"
CHANNEL_SMARTENERGY_METERING = "smartenergy_metering"
CHANNEL_TEMPERATURE = "temperature"
CHANNEL_THERMOSTAT = "thermostat"
CHANNEL_ZDO = "zdo"
CHANNEL_ZONE = ZONE = "ias_zone"
CLUSTER_COMMAND_SERVER = "server"
CLUSTER_COMMANDS_CLIENT = "client_commands"
CLUSTER_COMMANDS_SERVER = "server_commands"
CLUSTER_TYPE_IN = "in"
CLUSTER_TYPE_OUT = "out"
PLATFORMS = (
BINARY_SENSOR,
CLIMATE,
COVER,
DEVICE_TRACKER,
FAN,
LIGHT,
LOCK,
NUMBER,
SENSOR,
SWITCH,
)
CONF_BAUDRATE = "baudrate"
CONF_DATABASE = "database_path"
CONF_DEVICE_CONFIG = "device_config"
CONF_ENABLE_QUIRKS = "enable_quirks"
CONF_FLOWCONTROL = "flow_control"
CONF_RADIO_TYPE = "radio_type"
CONF_USB_PATH = "usb_path"
CONF_ZIGPY = "zigpy_config"
DATA_DEVICE_CONFIG = "zha_device_config"
DATA_ZHA = "zha"
DATA_ZHA_CONFIG = "config"
DATA_ZHA_BRIDGE_ID = "zha_bridge_id"
DATA_ZHA_CORE_EVENTS = "zha_core_events"
DATA_ZHA_DISPATCHERS = "zha_dispatchers"
DATA_ZHA_GATEWAY = "zha_gateway"
DATA_ZHA_PLATFORM_LOADED = "platform_loaded"
DEBUG_COMP_BELLOWS = "bellows"
DEBUG_COMP_ZHA = "homeassistant.components.zha"
DEBUG_COMP_ZIGPY = "zigpy"
DEBUG_COMP_ZIGPY_CC = "zigpy_cc"
DEBUG_COMP_ZIGPY_DECONZ = "zigpy_deconz"
DEBUG_COMP_ZIGPY_XBEE = "zigpy_xbee"
DEBUG_COMP_ZIGPY_ZIGATE = "zigpy_zigate"
DEBUG_LEVEL_CURRENT = "current"
DEBUG_LEVEL_ORIGINAL = "original"
DEBUG_LEVELS = {
DEBUG_COMP_BELLOWS: logging.DEBUG,
DEBUG_COMP_ZHA: logging.DEBUG,
DEBUG_COMP_ZIGPY: logging.DEBUG,
DEBUG_COMP_ZIGPY_CC: logging.DEBUG,
DEBUG_COMP_ZIGPY_DECONZ: logging.DEBUG,
DEBUG_COMP_ZIGPY_XBEE: logging.DEBUG,
DEBUG_COMP_ZIGPY_ZIGATE: logging.DEBUG,
}
DEBUG_RELAY_LOGGERS = [DEBUG_COMP_ZHA, DEBUG_COMP_ZIGPY]
DEFAULT_RADIO_TYPE = "ezsp"
DEFAULT_BAUDRATE = 57600
DEFAULT_DATABASE_NAME = "zigbee.db"
DEVICE_PAIRING_STATUS = "pairing_status"
DISCOVERY_KEY = "zha_discovery_info"
DOMAIN = "zha"
GROUP_ID = "group_id"
GROUP_IDS = "group_ids"
GROUP_NAME = "group_name"
MFG_CLUSTER_ID_START = 0xFC00
POWER_MAINS_POWERED = "Mains"
POWER_BATTERY_OR_UNKNOWN = "Battery or Unknown"
class RadioType(enum.Enum):
# pylint: disable=invalid-name
"""Possible options for radio type."""
znp = (
"ZNP = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2",
zigpy_znp.zigbee.application.ControllerApplication,
)
ezsp = (
"EZSP = Silicon Labs EmberZNet protocol: Elelabs, HUSBZB-1, Telegesis",
bellows.zigbee.application.ControllerApplication,
)
deconz = (
"deCONZ = dresden elektronik deCONZ protocol: ConBee I/II, RaspBee I/II",
zigpy_deconz.zigbee.application.ControllerApplication,
)
ti_cc = (
"Legacy TI_CC = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2",
zigpy_cc.zigbee.application.ControllerApplication,
)
zigate = (
"ZiGate = ZiGate Zigbee radios: PiZiGate, ZiGate USB-TTL, ZiGate WiFi",
zigpy_zigate.zigbee.application.ControllerApplication,
)
xbee = (
"XBee = Digi XBee Zigbee radios: Digi XBee Series 2, 2C, 3",
zigpy_xbee.zigbee.application.ControllerApplication,
)
@classmethod
def list(cls) -> List[str]:
"""Return a list of descriptions."""
return [e.description for e in RadioType]
@classmethod
def get_by_description(cls, description: str) -> str:
"""Get radio by description."""
for radio in cls:
if radio.description == description:
return radio.name
raise ValueError
def __init__(self, description: str, controller_cls: CALLABLE_T):
"""Init instance."""
self._desc = description
self._ctrl_cls = controller_cls
@property
def controller(self) -> CALLABLE_T:
"""Return controller class."""
return self._ctrl_cls
@property
def description(self) -> str:
"""Return radio type description."""
return self._desc
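# Example (sketch): the enum is usually consulted through its helpers, e.g.
# RadioType.list() to obtain the human-readable descriptions and
# RadioType.get_by_description(desc) to map a selection back to a member name.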
REPORT_CONFIG_MAX_INT = 900
REPORT_CONFIG_MAX_INT_BATTERY_SAVE = 10800
REPORT_CONFIG_MIN_INT = 30
REPORT_CONFIG_MIN_INT_ASAP = 1
REPORT_CONFIG_MIN_INT_IMMEDIATE = 0
REPORT_CONFIG_MIN_INT_OP = 5
REPORT_CONFIG_MIN_INT_BATTERY_SAVE = 3600
REPORT_CONFIG_RPT_CHANGE = 1
REPORT_CONFIG_DEFAULT = (
REPORT_CONFIG_MIN_INT,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_ASAP = (
REPORT_CONFIG_MIN_INT_ASAP,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_BATTERY_SAVE = (
REPORT_CONFIG_MIN_INT_BATTERY_SAVE,
REPORT_CONFIG_MAX_INT_BATTERY_SAVE,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_IMMEDIATE = (
REPORT_CONFIG_MIN_INT_IMMEDIATE,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_OP = (
REPORT_CONFIG_MIN_INT_OP,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
SENSOR_ACCELERATION = "acceleration"
SENSOR_BATTERY = "battery"
SENSOR_ELECTRICAL_MEASUREMENT = CHANNEL_ELECTRICAL_MEASUREMENT
SENSOR_GENERIC = "generic"
SENSOR_HUMIDITY = CHANNEL_HUMIDITY
SENSOR_ILLUMINANCE = CHANNEL_ILLUMINANCE
SENSOR_METERING = "metering"
SENSOR_OCCUPANCY = CHANNEL_OCCUPANCY
SENSOR_OPENING = "opening"
SENSOR_PRESSURE = CHANNEL_PRESSURE
SENSOR_TEMPERATURE = CHANNEL_TEMPERATURE
SENSOR_TYPE = "sensor_type"
SIGNAL_ADD_ENTITIES = "zha_add_new_entities"
SIGNAL_ATTR_UPDATED = "attribute_updated"
SIGNAL_AVAILABLE = "available"
SIGNAL_MOVE_LEVEL = "move_level"
SIGNAL_REMOVE = "remove"
SIGNAL_SET_LEVEL = "set_level"
SIGNAL_STATE_ATTR = "update_state_attribute"
SIGNAL_UPDATE_DEVICE = "{}_zha_update_device"
SIGNAL_GROUP_ENTITY_REMOVED = "group_entity_removed"
SIGNAL_GROUP_MEMBERSHIP_CHANGE = "group_membership_change"
UNKNOWN = "unknown"
UNKNOWN_MANUFACTURER = "unk_manufacturer"
UNKNOWN_MODEL = "unk_model"
WARNING_DEVICE_MODE_STOP = 0
WARNING_DEVICE_MODE_BURGLAR = 1
WARNING_DEVICE_MODE_FIRE = 2
WARNING_DEVICE_MODE_EMERGENCY = 3
WARNING_DEVICE_MODE_POLICE_PANIC = 4
WARNING_DEVICE_MODE_FIRE_PANIC = 5
WARNING_DEVICE_MODE_EMERGENCY_PANIC = 6
WARNING_DEVICE_STROBE_NO = 0
WARNING_DEVICE_STROBE_YES = 1
WARNING_DEVICE_SOUND_LOW = 0
WARNING_DEVICE_SOUND_MEDIUM = 1
WARNING_DEVICE_SOUND_HIGH = 2
WARNING_DEVICE_SOUND_VERY_HIGH = 3
WARNING_DEVICE_STROBE_LOW = 0x00
WARNING_DEVICE_STROBE_MEDIUM = 0x01
WARNING_DEVICE_STROBE_HIGH = 0x02
WARNING_DEVICE_STROBE_VERY_HIGH = 0x03
WARNING_DEVICE_SQUAWK_MODE_ARMED = 0
WARNING_DEVICE_SQUAWK_MODE_DISARMED = 1
ZHA_DISCOVERY_NEW = "zha_discovery_new_{}"
ZHA_GW_MSG = "zha_gateway_message"
ZHA_GW_MSG_DEVICE_FULL_INIT = "device_fully_initialized"
ZHA_GW_MSG_DEVICE_INFO = "device_info"
ZHA_GW_MSG_DEVICE_JOINED = "device_joined"
ZHA_GW_MSG_DEVICE_REMOVED = "device_removed"
ZHA_GW_MSG_GROUP_ADDED = "group_added"
ZHA_GW_MSG_GROUP_INFO = "group_info"
ZHA_GW_MSG_GROUP_MEMBER_ADDED = "group_member_added"
ZHA_GW_MSG_GROUP_MEMBER_REMOVED = "group_member_removed"
ZHA_GW_MSG_GROUP_REMOVED = "group_removed"
ZHA_GW_MSG_LOG_ENTRY = "log_entry"
ZHA_GW_MSG_LOG_OUTPUT = "log_output"
ZHA_GW_MSG_RAW_INIT = "raw_device_initialized"
EFFECT_BLINK = 0x00
EFFECT_BREATHE = 0x01
EFFECT_OKAY = 0x02
EFFECT_DEFAULT_VARIANT = 0x00
| mit | -281,145,349,148,930,180 | 8,625,730,443,760,007,000 | 29.321534 | 88 | 0.729254 | false |
bsmedberg/socorro | webapp-django/bin/linting.py | 2 | 1511 | #!/usr/bin/env python
"""
Use like this:
find somedir | xargs check.py | python linting.py
or:
check.py somedir | python linting.py
or:
git ls-files somedir | python linting.py
"""
import os
import sys
# Enter any part of a warning that we deem OK.
# It can be a pep8 warning error code or any other part of a string.
#
# NOTE! Be as specific as you possibly can!
# Only blanket whole files if you desperately have to
#
EXCEPTIONS = (
# has a exceptional use of `...import *`
'settings/base.py:4:',
# has a well known `...import *` trick that we like
'settings/__init__.py',
# all downloaded libs to be ignored
'/js/lib/',
# See https://bugzilla.mozilla.org/show_bug.cgi?id=997270
'/js/jquery/',
'/js/flot',
'/js/timeago/',
'jquery.tablesorter.min.js',
'async-local-storage-with-Promise.min.js',
'underscore-min.js',
'moment.min.js',
'jquery.metadata.js',
)
EXTENSIONS_ONLY = (
'.py',
# commented out until we clean up our .js files
# See https://bugzilla.mozilla.org/show_bug.cgi?id=997272
# '.js'
)
def main():
errors = 0
for line in sys.stdin:
if not line.strip():
continue
_, ext = os.path.splitext(line.split(':')[0])
if ext not in EXTENSIONS_ONLY:
continue
if [f for f in EXCEPTIONS if f in line]:
continue
errors += 1
sys.stderr.write(line)
return errors
if __name__ == '__main__':
sys.exit(main())
| mpl-2.0 | -3,203,063,352,067,916,300 | -4,978,778,281,336,305,000 | 19.986111 | 68 | 0.599603 | false |
ericholscher/django-tastypie | tastypie/validation.py | 47 | 3685 | from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.forms import ModelForm
from django.forms.models import model_to_dict
class Validation(object):
"""
A basic validation stub that does no validation.
"""
def __init__(self, **kwargs):
pass
def is_valid(self, bundle, request=None):
"""
Performs a check on the data within the bundle (and optionally the
request) to ensure it is valid.
Should return a dictionary of error messages. If the dictionary has
zero items, the data is considered valid. If there are errors, keys
in the dictionary should be field names and the values should be a list
of errors, even if there is only one.
"""
return {}
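# Example (sketch): a custom check built on the contract above; the ``title``
# field name is purely illustrative.
#
#   class TitleRequiredValidation(Validation):
#       def is_valid(self, bundle, request=None):
#           if not bundle.data.get('title'):
#               return {'title': ['This field is required.']}
#           return {}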
class FormValidation(Validation):
"""
A validation class that uses a Django ``Form`` to validate the data.
This class **DOES NOT** alter the data sent, only verifies it. If you
want to alter the data, please use the ``CleanedDataFormValidation`` class
instead.
This class requires a ``form_class`` argument, which should be a Django
``Form`` (or ``ModelForm``, though ``save`` will never be called) class.
This form will be used to validate the data in ``bundle.data``.
"""
def __init__(self, **kwargs):
        if 'form_class' not in kwargs:
raise ImproperlyConfigured("You must provide a 'form_class' to 'FormValidation' classes.")
self.form_class = kwargs.pop('form_class')
super(FormValidation, self).__init__(**kwargs)
def form_args(self, bundle):
data = bundle.data
# Ensure we get a bound Form, regardless of the state of the bundle.
if data is None:
data = {}
kwargs = {'data': {}}
if hasattr(bundle.obj, 'pk'):
if issubclass(self.form_class, ModelForm):
kwargs['instance'] = bundle.obj
kwargs['data'] = model_to_dict(bundle.obj)
kwargs['data'].update(data)
return kwargs
def is_valid(self, bundle, request=None):
"""
        Performs a check on ``bundle.data`` to ensure it is valid.
        If the form is valid, an empty dictionary (all valid) will be returned.
        If not, a dictionary of errors will be returned.
"""
form = self.form_class(**self.form_args(bundle))
if form.is_valid():
return {}
# The data is invalid. Let's collect all the error messages & return
# them.
return form.errors
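# Example (sketch): attaching this class to a resource; ``NoteForm`` and
# ``NoteResource`` are hypothetical names used only for illustration.
#
#   class NoteResource(ModelResource):
#       class Meta:
#           queryset = Note.objects.all()
#           validation = FormValidation(form_class=NoteForm)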
class CleanedDataFormValidation(FormValidation):
"""
A validation class that uses a Django ``Form`` to validate the data.
This class **ALTERS** data sent by the user!!!
This class requires a ``form_class`` argument, which should be a Django
``Form`` (or ``ModelForm``, though ``save`` will never be called) class.
This form will be used to validate the data in ``bundle.data``.
"""
def is_valid(self, bundle, request=None):
"""
        Checks ``bundle.data`` to ensure it is valid & replaces it with the
        cleaned results.
        If the form is valid, an empty dictionary (all valid) will be returned.
        If not, a dictionary of errors will be returned.
"""
form = self.form_class(**self.form_args(bundle))
if form.is_valid():
# We're different here & relying on having a reference to the same
# bundle the rest of the process is using.
bundle.data = form.cleaned_data
return {}
# The data is invalid. Let's collect all the error messages & return
# them.
return form.errors
| bsd-3-clause | -4,559,692,870,145,243,600 | -3,698,710,543,214,422,500 | 32.5 | 102 | 0.61981 | false |
suninsky/ReceiptOCR | Python/server/lib/python2.7/site-packages/selenium/webdriver/safari/service.py | 31 | 1854 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
from selenium.webdriver.common import service, utils
from subprocess import PIPE
class Service(service.Service):
"""
Object that manages the starting and stopping of the SafariDriver
"""
def __init__(self, executable_path, port=0, quiet=False):
"""
Creates a new instance of the Service
:Args:
- executable_path : Path to the SafariDriver
- port : Port the service is running on """
if not os.path.exists(executable_path):
raise Exception("SafariDriver requires Safari 10 on OSX El Capitan or greater")
if port == 0:
port = utils.free_port()
self.quiet = quiet
log = PIPE
if quiet:
log = open(os.devnull, 'w')
service.Service.__init__(self, executable_path, port, log)
def command_line_args(self):
return ["-p", "%s" % self.port]
@property
def service_url(self):
"""
Gets the url of the SafariDriver Service
"""
return "http://localhost:%d" % self.port
| mit | -2,613,016,282,019,282,400 | 6,582,736,163,305,027,000 | 32.107143 | 91 | 0.668824 | false |
G33KS44n/mysql-5.6 | xtrabackup/test/kewpie/percona_tests/xtrabackup_disabled/bug766607_test.py | 24 | 8243 | #! /usr/bin/env python
# -*- mode: python; indent-tabs-mode: nil; -*-
# vim:expandtab:shiftwidth=2:tabstop=2:smarttab:
#
# Copyright (C) 2011 Patrick Crews
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import shutil
from lib.util.mysqlBaseTestCase import mysqlBaseTestCase
server_requirements = [['--innodb-file-per-table']]
servers = []
server_manager = None
test_executor = None
# we explicitly use the --no-timestamp option
# here. We will be using a generic / vanilla backup dir
backup_path = None
class basicTest(mysqlBaseTestCase):
def setUp(self):
master_server = servers[0] # assumption that this is 'master'
backup_path = os.path.join(master_server.vardir, 'full_backup')
inc_backup_path = os.path.join(master_server.vardir, 'inc_backup')
# remove backup paths
for del_path in [backup_path, inc_backup_path]:
if os.path.exists(del_path):
shutil.rmtree(del_path)
def load_table(self, table_name, row_count, server):
queries = []
for i in range(row_count):
queries.append("INSERT INTO %s VALUES (%d, %d)" %(table_name,i, row_count))
retcode, result = self.execute_queries(queries, server)
self.assertEqual(retcode, 0, msg=result)
def test_ib_incremental(self):
self.servers = servers
logging = test_executor.logging
if servers[0].type not in ['mysql','percona']:
return
else:
innobackupex = test_executor.system_manager.innobackupex_path
xtrabackup = test_executor.system_manager.xtrabackup_path
master_server = servers[0] # assumption that this is 'master'
backup_path = os.path.join(master_server.vardir, 'full_backup')
inc_backup_path = os.path.join(master_server.vardir, 'inc_backup')
output_path = os.path.join(master_server.vardir, 'innobackupex.out')
exec_path = os.path.dirname(innobackupex)
table_name = "`test`"
# populate our server with a test bed
queries = ["DROP TABLE IF EXISTS %s" %(table_name)
,("CREATE TABLE %s "
"(`a` int(11) DEFAULT NULL, "
"`number` int(11) DEFAULT NULL) "
" ENGINE=InnoDB DEFAULT CHARSET=latin1"
%(table_name)
)
]
retcode, result = self.execute_queries(queries, master_server)
self.assertEqual(retcode, 0, msg = result)
row_count = 100
self.load_table(table_name, row_count, master_server)
# take a backup
cmd = [ xtrabackup
, "--defaults-file=%s" %master_server.cnf_file
, "--datadir=%s" %master_server.datadir
, "--backup"
, "--target-dir=%s" %backup_path
]
cmd = " ".join(cmd)
retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
self.assertTrue(retcode==0,output)
queries = [ "CREATE TABLE t(a int) ENGINE=InnoDB"
, "INSERT INTO t VALUES (1), (2), (3)"
, "FLUSH LOGS"
]
retcode, result = self.execute_queries(queries, master_server)
self.assertEqual(retcode,0,msg=result)
# stop / restart the server
master_server.stop()
master_server.start()
# Take an incremental backup
cmd = [ xtrabackup
, "--defaults-file=%s" %master_server.cnf_file
, "--datadir=%s" %master_server.datadir
, "--backup"
, "--target-dir=%s" %inc_backup_path
, "--incremental-basedir=%s" %backup_path
]
cmd = " ".join(cmd)
logging.test_debug(cmd)
retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
self.assertEqual(retcode,0,output)
# shutdown our server
master_server.stop()
# prepare our main backup
cmd = [ xtrabackup
, "--prepare"
, "--apply-log-only"
, "--datadir=%s" %master_server.datadir
, "--use-memory=500M"
, "--target-dir=%s" %backup_path
]
cmd = " ".join(cmd)
retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
self.assertTrue(retcode==0,output)
# prepare our incremental backup
cmd = [ xtrabackup
, "--prepare"
, "--apply-log-only"
, "--datadir=%s" %master_server.datadir
, "--use-memory=500M"
, "--target-dir=%s" %backup_path
, "--incremental-dir=%s" %(inc_backup_path)
]
cmd = " ".join(cmd)
retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
self.assertTrue(retcode==0,output)
# do final prepare on main backup
cmd = [ xtrabackup
, "--prepare"
, "--datadir=%s" %master_server.datadir
, "--use-memory=500M"
, "--target-dir=%s" %backup_path
]
cmd = " ".join(cmd)
retcode, output = self.execute_cmd(cmd, output_path, exec_path, True)
self.assertTrue(retcode==0,output)
# copy our data files back
for root, dirs, files in os.walk(backup_path):
if files:
file_info = root.split(backup_path)[1]
for file_name in files:
# We do a quick check to make sure
# no names start with '/' as os.path
# throws a hissy when it sees such things
if file_info.startswith('/'):
file_info = file_info[1:]
if file_name.startswith('/'):
file_name = file_name[1:]
to_path = os.path.join(master_server.datadir
, file_info
, file_name)
new_dir = os.path.dirname(to_path)
try:
if not os.path.exists(new_dir):
os.makedirs(new_dir)
except OSError, e:
logging.error("Could not create directory: %s | %s" %(new_dir, e))
try:
shutil.copy(os.path.join(root,file_name),to_path)
except IOError, e:
logging.error( "ERROR: Could not copy file: %s | %s" %(file_name, e))
# restart server (and ensure it doesn't crash)
master_server.start()
self.assertTrue(master_server.status==1, 'Server failed restart from restored datadir...')
# Get a checksum for our table
query = "SELECT * FROM t"
retcode, result = self.execute_query(query, master_server)
self.assertEqual(retcode, 0, msg=result)
expected_result= ((1L,), (2L,), (3L,))
self.assertEqual(result, expected_result, msg = "%s || %s" %(expected_result, result))
| gpl-2.0 | -4,272,080,064,107,729,000 | 3,008,483,605,087,156,000 | 41.489691 | 102 | 0.519592 | false |
cnits/CnitSymfony | vendor/doctrine/orm/docs/en/conf.py | 2448 | 6497 | # -*- coding: utf-8 -*-
#
# Doctrine 2 ORM documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 3 18:10:24 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('_exts'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['configurationblock']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Doctrine 2 ORM'
copyright = u'2010-12, Doctrine Project Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2'
# The full version, including alpha/beta/rc tags.
release = '2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'doctrine'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Doctrine2ORMdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Doctrine2ORM.tex', u'Doctrine 2 ORM Documentation',
u'Doctrine Project Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
primary_domain = "dcorm"
def linkcode_resolve(domain, info):
if domain == 'dcorm':
return 'http://'
return None
| mit | 5,850,316,531,350,188,000 | -1,009,817,731,272,733,200 | 31.323383 | 80 | 0.711867 | false |
JijonHyuni/HyperKernel-JB | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display a process of packets and processed time.
# It helps us to investigate networking or network devices.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
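# Typical usage (sketch; the report step accepts the options listed above):
#   perf script record netdev-times
#   perf script report netdev-times tx rx dev=eth0 debug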
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
# which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value includes time of NET_RX softirq-entry
# and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
# skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
# tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed
# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;
# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
return (dst - src) / 1000000.0
# Display a process of transmitting a packet
def print_transmit(hunk):
if dev != 0 and hunk['dev'].find(dev) < 0:
return
print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
(hunk['dev'], hunk['len'],
nsecs_secs(hunk['queue_t']),
nsecs_nsecs(hunk['queue_t'])/1000,
diff_msec(hunk['queue_t'], hunk['xmit_t']),
diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrupts associated with
# a NET_RX softirq
def print_receive(hunk):
show_hunk = 0
irq_list = hunk['irq_list']
cpu = irq_list[0]['cpu']
base_t = irq_list[0]['irq_ent_t']
# check if this hunk should be showed
if dev != 0:
for i in range(len(irq_list)):
if irq_list[i]['name'].find(dev) >= 0:
show_hunk = 1
break
else:
show_hunk = 1
if show_hunk == 0:
return
print "%d.%06dsec cpu=%d" % \
(nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
for i in range(len(irq_list)):
print PF_IRQ_ENTRY % \
(diff_msec(base_t, irq_list[i]['irq_ent_t']),
irq_list[i]['irq'], irq_list[i]['name'])
print PF_JOINT
irq_event_list = irq_list[i]['event_list']
for j in range(len(irq_event_list)):
irq_event = irq_event_list[j]
if irq_event['event'] == 'netif_rx':
print PF_NET_RX % \
(diff_msec(base_t, irq_event['time']),
irq_event['skbaddr'])
print PF_JOINT
print PF_SOFT_ENTRY % \
diff_msec(base_t, hunk['sirq_ent_t'])
print PF_JOINT
event_list = hunk['event_list']
for i in range(len(event_list)):
event = event_list[i]
if event['event_name'] == 'napi_poll':
print PF_NAPI_POLL % \
(diff_msec(base_t, event['event_t']), event['dev'])
if i == len(event_list) - 1:
print ""
else:
print PF_JOINT
else:
print PF_NET_RECV % \
(diff_msec(base_t, event['event_t']), event['skbaddr'],
event['len'])
if 'comm' in event.keys():
print PF_WJOINT
print PF_CPY_DGRAM % \
(diff_msec(base_t, event['comm_t']),
event['pid'], event['comm'])
elif 'handle' in event.keys():
print PF_WJOINT
if event['handle'] == "kfree_skb":
print PF_KFREE_SKB % \
(diff_msec(base_t,
event['comm_t']),
event['location'])
elif event['handle'] == "consume_skb":
print PF_CONS_SKB % \
diff_msec(base_t,
event['comm_t'])
print PF_JOINT
def trace_begin():
global show_tx
global show_rx
global dev
global debug
for i in range(len(sys.argv)):
if i == 0:
continue
arg = sys.argv[i]
if arg == 'tx':
show_tx = 1
elif arg =='rx':
show_rx = 1
elif arg.find('dev=',0, 4) >= 0:
dev = arg[4:]
elif arg == 'debug':
debug = 1
if show_tx == 0 and show_rx == 0:
show_tx = 1
show_rx = 1
def trace_end():
# order all events in time
all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
b[EINFO_IDX_TIME]))
# process all events
for i in range(len(all_event_list)):
event_info = all_event_list[i]
name = event_info[EINFO_IDX_NAME]
if name == 'irq__softirq_exit':
handle_irq_softirq_exit(event_info)
elif name == 'irq__softirq_entry':
handle_irq_softirq_entry(event_info)
elif name == 'irq__softirq_raise':
handle_irq_softirq_raise(event_info)
elif name == 'irq__irq_handler_entry':
handle_irq_handler_entry(event_info)
elif name == 'irq__irq_handler_exit':
handle_irq_handler_exit(event_info)
elif name == 'napi__napi_poll':
handle_napi_poll(event_info)
elif name == 'net__netif_receive_skb':
handle_netif_receive_skb(event_info)
elif name == 'net__netif_rx':
handle_netif_rx(event_info)
elif name == 'skb__skb_copy_datagram_iovec':
handle_skb_copy_datagram_iovec(event_info)
elif name == 'net__net_dev_queue':
handle_net_dev_queue(event_info)
elif name == 'net__net_dev_xmit':
handle_net_dev_xmit(event_info)
elif name == 'skb__kfree_skb':
handle_kfree_skb(event_info)
elif name == 'skb__consume_skb':
handle_consume_skb(event_info)
# display receive hunks
if show_rx:
for i in range(len(receive_hunk_list)):
print_receive(receive_hunk_list[i])
# display transmit hunks
if show_tx:
print " dev len Qdisc " \
" netdevice free"
for i in range(len(tx_free_list)):
print_transmit(tx_free_list[i])
if debug:
print "debug buffer status"
print "----------------------------"
print "xmit Qdisc:remain:%d overflow:%d" % \
(len(tx_queue_list), of_count_tx_queue_list)
print "xmit netdevice:remain:%d overflow:%d" % \
(len(tx_xmit_list), of_count_tx_xmit_list)
print "receive:remain:%d overflow:%d" % \
(len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a corresponding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
irq, irq_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
irq, irq_name)
all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
napi, dev_name)
all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, rc, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, rc ,dev_name)
all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
skbaddr, protocol, location):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, protocol, location)
all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr)
all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen)
all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
(name, context, cpu, time, pid, comm, irq, irq_name) = event_info
if cpu not in irq_dic.keys():
irq_dic[cpu] = []
irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
irq_dic[cpu].append(irq_record)
def handle_irq_handler_exit(event_info):
(name, context, cpu, time, pid, comm, irq, ret) = event_info
if cpu not in irq_dic.keys():
return
irq_record = irq_dic[cpu].pop()
if irq != irq_record['irq']:
return
irq_record.update({'irq_ext_t':time})
# if an irq doesn't include NET_RX softirq, drop.
if 'event_list' in irq_record.keys():
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'sirq_raise'})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
irq_list = []
event_list = 0
if cpu in irq_dic.keys():
irq_list = irq_dic[cpu]
del irq_dic[cpu]
if cpu in net_rx_dic.keys():
sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
event_list = net_rx_dic[cpu]['event_list']
del net_rx_dic[cpu]
if irq_list == [] or event_list == 0:
return
rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
'irq_list':irq_list, 'event_list':event_list}
	# merge information related to a NET_RX softirq
receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
(name, context, cpu, time, pid, comm, napi, dev_name) = event_info
if cpu in net_rx_dic.keys():
event_list = net_rx_dic[cpu]['event_list']
rec_data = {'event_name':'napi_poll',
'dev':dev_name, 'event_t':time}
event_list.append(rec_data)
def handle_netif_rx(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'netif_rx',
'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
global of_count_rx_skb_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu in net_rx_dic.keys():
rec_data = {'event_name':'netif_receive_skb',
'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
event_list = net_rx_dic[cpu]['event_list']
event_list.append(rec_data)
rx_skb_list.insert(0, rec_data)
if len(rx_skb_list) > buffer_budget:
rx_skb_list.pop()
of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
global of_count_tx_queue_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
tx_queue_list.insert(0, skb)
if len(tx_queue_list) > buffer_budget:
tx_queue_list.pop()
of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
global of_count_tx_xmit_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, rc, dev_name) = event_info
if rc == 0: # NETDEV_TX_OK
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
skb['xmit_t'] = time
tx_xmit_list.insert(0, skb)
del tx_queue_list[i]
if len(tx_xmit_list) > buffer_budget:
tx_xmit_list.pop()
of_count_tx_xmit_list += 1
return
def handle_kfree_skb(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, protocol, location) = event_info
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
del tx_queue_list[i]
return
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if rec_data['skbaddr'] == skbaddr:
rec_data.update({'handle':"kfree_skb",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
def handle_consume_skb(event_info):
(name, context, cpu, time, pid, comm, skbaddr) = event_info
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
def handle_skb_copy_datagram_iovec(event_info):
(name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if skbaddr == rec_data['skbaddr']:
rec_data.update({'handle':"skb_copy_datagram_iovec",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
| gpl-2.0 | 3,718,895,219,749,400,600 | -869,637,990,571,529,100 | 31.431034 | 78 | 0.645335 | false |
badloop/SickRage | lib/tornado/concurrent.py | 35 | 18000 | #!/usr/bin/env python
#
# Copyright 2012 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities for working with threads and ``Futures``.
``Futures`` are a pattern for concurrent programming introduced in
Python 3.2 in the `concurrent.futures` package (this package has also
been backported to older versions of Python and can be installed with
``pip install futures``). Tornado will use `concurrent.futures.Future` if
it is available; otherwise it will use a compatible class defined in this
module.
"""
from __future__ import absolute_import, division, print_function, with_statement
import functools
import platform
import traceback
import sys
from tornado.log import app_log
from tornado.stack_context import ExceptionStackContext, wrap
from tornado.util import raise_exc_info, ArgReplacer
try:
from concurrent import futures
except ImportError:
futures = None
# Can the garbage collector handle cycles that include __del__ methods?
# This is true in cpython beginning with version 3.4 (PEP 442).
_GC_CYCLE_FINALIZERS = (platform.python_implementation() == 'CPython' and
sys.version_info >= (3, 4))
class ReturnValueIgnoredError(Exception):
pass
# This class and associated code in the future object is derived
# from the Trollius project, a backport of asyncio to Python 2.x - 3.x
class _TracebackLogger(object):
"""Helper to log a traceback upon destruction if not cleared.
This solves a nasty problem with Futures and Tasks that have an
exception set: if nobody asks for the exception, the exception is
never logged. This violates the Zen of Python: 'Errors should
never pass silently. Unless explicitly silenced.'
However, we don't want to log the exception as soon as
set_exception() is called: if the calling code is written
properly, it will get the exception and handle it properly. But
we *do* want to log it if result() or exception() was never called
-- otherwise developers waste a lot of time wondering why their
buggy code fails silently.
An earlier attempt added a __del__() method to the Future class
itself, but this backfired because the presence of __del__()
prevents garbage collection from breaking cycles. A way out of
this catch-22 is to avoid having a __del__() method on the Future
class itself, but instead to have a reference to a helper object
with a __del__() method that logs the traceback, where we ensure
that the helper object doesn't participate in cycles, and only the
Future has a reference to it.
The helper object is added when set_exception() is called. When
the Future is collected, and the helper is present, the helper
object is also collected, and its __del__() method will log the
traceback. When the Future's result() or exception() method is
    called (and a helper object is present), it removes the helper
object, after calling its clear() method to prevent it from
logging.
One downside is that we do a fair amount of work to extract the
traceback from the exception, even when it is never logged. It
would seem cheaper to just store the exception object, but that
references the traceback, which references stack frames, which may
reference the Future, which references the _TracebackLogger, and
then the _TracebackLogger would be included in a cycle, which is
what we're trying to avoid! As an optimization, we don't
immediately format the exception; we only do the work when
    activate() is called, a call that is delayed until after all the
    Future's callbacks have run. Since a Future usually has at least
    one callback (typically set by 'yield From'), and that callback
    usually extracts the exception, the need to format it rarely
    arises in practice.
PS. I don't claim credit for this solution. I first heard of it
in a discussion about closing files when they are collected.
"""
__slots__ = ('exc_info', 'formatted_tb')
def __init__(self, exc_info):
self.exc_info = exc_info
self.formatted_tb = None
def activate(self):
exc_info = self.exc_info
if exc_info is not None:
self.exc_info = None
self.formatted_tb = traceback.format_exception(*exc_info)
def clear(self):
self.exc_info = None
self.formatted_tb = None
def __del__(self):
if self.formatted_tb:
app_log.error('Future exception was never retrieved: %s',
''.join(self.formatted_tb).rstrip())
class Future(object):
"""Placeholder for an asynchronous result.
A ``Future`` encapsulates the result of an asynchronous
operation. In synchronous applications ``Futures`` are used
to wait for the result from a thread or process pool; in
Tornado they are normally used with `.IOLoop.add_future` or by
yielding them in a `.gen.coroutine`.
`tornado.concurrent.Future` is similar to
`concurrent.futures.Future`, but not thread-safe (and therefore
faster for use with single-threaded event loops).
In addition to ``exception`` and ``set_exception``, methods ``exc_info``
and ``set_exc_info`` are supported to capture tracebacks in Python 2.
The traceback is automatically available in Python 3, but in the
Python 2 futures backport this information is discarded.
This functionality was previously available in a separate class
``TracebackFuture``, which is now a deprecated alias for this class.
.. versionchanged:: 4.0
`tornado.concurrent.Future` is always a thread-unsafe ``Future``
with support for the ``exc_info`` methods. Previously it would
be an alias for the thread-safe `concurrent.futures.Future`
if that package was available and fall back to the thread-unsafe
implementation if it was not.
.. versionchanged:: 4.1
If a `.Future` contains an error but that error is never observed
(by calling ``result()``, ``exception()``, or ``exc_info()``),
a stack trace will be logged when the `.Future` is garbage collected.
This normally indicates an error in the application, but in cases
where it results in undesired logging it may be necessary to
suppress the logging by ensuring that the exception is observed:
``f.add_done_callback(lambda f: f.exception())``.
"""
def __init__(self):
self._done = False
self._result = None
self._exc_info = None
self._log_traceback = False # Used for Python >= 3.4
self._tb_logger = None # Used for Python <= 3.3
self._callbacks = []
def cancel(self):
"""Cancel the operation, if possible.
Tornado ``Futures`` do not support cancellation, so this method always
returns False.
"""
return False
def cancelled(self):
"""Returns True if the operation has been cancelled.
Tornado ``Futures`` do not support cancellation, so this method
always returns False.
"""
return False
def running(self):
"""Returns True if this operation is currently running."""
return not self._done
def done(self):
"""Returns True if the future has finished running."""
return self._done
def _clear_tb_log(self):
self._log_traceback = False
if self._tb_logger is not None:
self._tb_logger.clear()
self._tb_logger = None
def result(self, timeout=None):
"""If the operation succeeded, return its result. If it failed,
re-raise its exception.
This method takes a ``timeout`` argument for compatibility with
`concurrent.futures.Future` but it is an error to call it
before the `Future` is done, so the ``timeout`` is never used.
"""
self._clear_tb_log()
if self._result is not None:
return self._result
if self._exc_info is not None:
raise_exc_info(self._exc_info)
self._check_done()
return self._result
def exception(self, timeout=None):
"""If the operation raised an exception, return the `Exception`
object. Otherwise returns None.
This method takes a ``timeout`` argument for compatibility with
`concurrent.futures.Future` but it is an error to call it
before the `Future` is done, so the ``timeout`` is never used.
"""
self._clear_tb_log()
if self._exc_info is not None:
return self._exc_info[1]
else:
self._check_done()
return None
def add_done_callback(self, fn):
"""Attaches the given callback to the `Future`.
It will be invoked with the `Future` as its argument when the Future
has finished running and its result is available. In Tornado
consider using `.IOLoop.add_future` instead of calling
`add_done_callback` directly.
"""
if self._done:
fn(self)
else:
self._callbacks.append(fn)
def set_result(self, result):
"""Sets the result of a ``Future``.
It is undefined to call any of the ``set`` methods more than once
on the same object.
"""
self._result = result
self._set_done()
def set_exception(self, exception):
"""Sets the exception of a ``Future.``"""
self.set_exc_info(
(exception.__class__,
exception,
getattr(exception, '__traceback__', None)))
def exc_info(self):
"""Returns a tuple in the same format as `sys.exc_info` or None.
.. versionadded:: 4.0
"""
self._clear_tb_log()
return self._exc_info
def set_exc_info(self, exc_info):
"""Sets the exception information of a ``Future.``
Preserves tracebacks on Python 2.
.. versionadded:: 4.0
"""
self._exc_info = exc_info
self._log_traceback = True
if not _GC_CYCLE_FINALIZERS:
self._tb_logger = _TracebackLogger(exc_info)
try:
self._set_done()
finally:
# Activate the logger after all callbacks have had a
# chance to call result() or exception().
if self._log_traceback and self._tb_logger is not None:
self._tb_logger.activate()
self._exc_info = exc_info
def _check_done(self):
if not self._done:
raise Exception("DummyFuture does not support blocking for results")
def _set_done(self):
self._done = True
for cb in self._callbacks:
try:
cb(self)
except Exception:
app_log.exception('Exception in callback %r for %r',
cb, self)
self._callbacks = None
    # On Python 3.3 or older, objects with a destructor that are part of a
    # reference cycle are never destroyed. This is no longer the case on
    # Python 3.4, thanks to PEP 442.
if _GC_CYCLE_FINALIZERS:
def __del__(self):
if not self._log_traceback:
# set_exception() was not called, or result() or exception()
# has consumed the exception
return
tb = traceback.format_exception(*self._exc_info)
app_log.error('Future %r exception was never retrieved: %s',
self, ''.join(tb).rstrip())
TracebackFuture = Future
if futures is None:
FUTURES = Future
else:
FUTURES = (futures.Future, Future)
def is_future(x):
return isinstance(x, FUTURES)
class DummyExecutor(object):
def submit(self, fn, *args, **kwargs):
future = TracebackFuture()
try:
future.set_result(fn(*args, **kwargs))
except Exception:
future.set_exc_info(sys.exc_info())
return future
def shutdown(self, wait=True):
pass
dummy_executor = DummyExecutor()
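# DummyExecutor runs the submitted callable immediately on the calling thread
# and wraps its return value (or exception) in a Future, so callers keep a
# uniform Future-based interface even when no real thread pool is configured.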
def run_on_executor(*args, **kwargs):
"""Decorator to run a synchronous method asynchronously on an executor.
The decorated method may be called with a ``callback`` keyword
argument and returns a future.
The `.IOLoop` and executor to be used are determined by the ``io_loop``
and ``executor`` attributes of ``self``. To use different attributes,
pass keyword arguments to the decorator::
@run_on_executor(executor='_thread_pool')
def foo(self):
pass
.. versionchanged:: 4.2
Added keyword arguments to use alternative attributes.
"""
def run_on_executor_decorator(fn):
executor = kwargs.get("executor", "executor")
io_loop = kwargs.get("io_loop", "io_loop")
@functools.wraps(fn)
def wrapper(self, *args, **kwargs):
callback = kwargs.pop("callback", None)
future = getattr(self, executor).submit(fn, self, *args, **kwargs)
if callback:
getattr(self, io_loop).add_future(
future, lambda future: callback(future.result()))
return future
return wrapper
if args and kwargs:
raise ValueError("cannot combine positional and keyword args")
if len(args) == 1:
return run_on_executor_decorator(args[0])
elif len(args) != 0:
raise ValueError("expected 1 argument, got %d", len(args))
return run_on_executor_decorator
_NO_RESULT = object()
def return_future(f):
"""Decorator to make a function that returns via callback return a
`Future`.
The wrapped function should take a ``callback`` keyword argument
and invoke it with one argument when it has finished. To signal failure,
the function can simply raise an exception (which will be
captured by the `.StackContext` and passed along to the ``Future``).
From the caller's perspective, the callback argument is optional.
If one is given, it will be invoked when the function is complete
with `Future.result()` as an argument. If the function fails, the
callback will not be run and an exception will be raised into the
surrounding `.StackContext`.
If no callback is given, the caller should use the ``Future`` to
wait for the function to complete (perhaps by yielding it in a
`.gen.engine` function, or passing it to `.IOLoop.add_future`).
Usage:
.. testcode::
@return_future
def future_func(arg1, arg2, callback):
# Do stuff (possibly asynchronous)
callback(result)
@gen.engine
def caller(callback):
yield future_func(arg1, arg2)
callback()
..
Note that ``@return_future`` and ``@gen.engine`` can be applied to the
same function, provided ``@return_future`` appears first. However,
consider using ``@gen.coroutine`` instead of this combination.
"""
replacer = ArgReplacer(f, 'callback')
@functools.wraps(f)
def wrapper(*args, **kwargs):
future = TracebackFuture()
callback, args, kwargs = replacer.replace(
lambda value=_NO_RESULT: future.set_result(value),
args, kwargs)
def handle_error(typ, value, tb):
future.set_exc_info((typ, value, tb))
return True
exc_info = None
with ExceptionStackContext(handle_error):
try:
result = f(*args, **kwargs)
if result is not None:
raise ReturnValueIgnoredError(
"@return_future should not be used with functions "
"that return values")
except:
exc_info = sys.exc_info()
raise
if exc_info is not None:
# If the initial synchronous part of f() raised an exception,
# go ahead and raise it to the caller directly without waiting
# for them to inspect the Future.
future.result()
# If the caller passed in a callback, schedule it to be called
# when the future resolves. It is important that this happens
# just before we return the future, or else we risk confusing
# stack contexts with multiple exceptions (one here with the
# immediate exception, and again when the future resolves and
# the callback triggers its exception by calling future.result()).
if callback is not None:
def run_callback(future):
result = future.result()
if result is _NO_RESULT:
callback()
else:
callback(future.result())
future.add_done_callback(wrap(run_callback))
return future
return wrapper
def chain_future(a, b):
"""Chain two futures together so that when one completes, so does the other.
The result (success or failure) of ``a`` will be copied to ``b``, unless
``b`` has already been completed or cancelled by the time ``a`` finishes.
"""
def copy(future):
assert future is a
if b.done():
return
if (isinstance(a, TracebackFuture) and isinstance(b, TracebackFuture)
and a.exc_info() is not None):
b.set_exc_info(a.exc_info())
elif a.exception() is not None:
b.set_exception(a.exception())
else:
b.set_result(a.result())
a.add_done_callback(copy)
| gpl-3.0 | 8,939,050,279,770,725,000 | -5,193,086,740,088,689,000 | 35.585366 | 80 | 0.634444 | false |
balloob/home-assistant | tests/components/alarmdecoder/test_config_flow.py | 5 | 13829 | """Test the AlarmDecoder config flow."""
from alarmdecoder.util import NoDeviceError
import pytest
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.alarmdecoder import config_flow
from homeassistant.components.alarmdecoder.const import (
CONF_ALT_NIGHT_MODE,
CONF_AUTO_BYPASS,
CONF_CODE_ARM_REQUIRED,
CONF_DEVICE_BAUD,
CONF_DEVICE_PATH,
CONF_RELAY_ADDR,
CONF_RELAY_CHAN,
CONF_ZONE_LOOP,
CONF_ZONE_NAME,
CONF_ZONE_NUMBER,
CONF_ZONE_RFID,
CONF_ZONE_TYPE,
DEFAULT_ARM_OPTIONS,
DEFAULT_ZONE_OPTIONS,
DOMAIN,
OPTIONS_ARM,
OPTIONS_ZONES,
PROTOCOL_SERIAL,
PROTOCOL_SOCKET,
)
from homeassistant.components.binary_sensor import DEVICE_CLASS_WINDOW
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_PROTOCOL
from homeassistant.core import HomeAssistant
from tests.async_mock import patch
from tests.common import MockConfigEntry
@pytest.mark.parametrize(
"protocol,connection,title",
[
(
PROTOCOL_SOCKET,
{
CONF_HOST: "alarmdecoder123",
CONF_PORT: 10001,
},
"alarmdecoder123:10001",
),
(
PROTOCOL_SERIAL,
{
CONF_DEVICE_PATH: "/dev/ttyUSB123",
CONF_DEVICE_BAUD: 115000,
},
"/dev/ttyUSB123",
),
],
)
async def test_setups(hass: HomeAssistant, protocol, connection, title):
"""Test flow for setting up the available AlarmDecoder protocols."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PROTOCOL: protocol},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "protocol"
with patch("homeassistant.components.alarmdecoder.config_flow.AdExt.open"), patch(
"homeassistant.components.alarmdecoder.config_flow.AdExt.close"
), patch(
"homeassistant.components.alarmdecoder.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.alarmdecoder.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"], connection
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == title
assert result["data"] == {
**connection,
CONF_PROTOCOL: protocol,
}
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_setup_connection_error(hass: HomeAssistant):
"""Test flow for setup with a connection error."""
port = 1001
host = "alarmdecoder"
protocol = PROTOCOL_SOCKET
connection_settings = {CONF_HOST: host, CONF_PORT: port}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PROTOCOL: protocol},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "protocol"
with patch(
"homeassistant.components.alarmdecoder.config_flow.AdExt.open",
side_effect=NoDeviceError,
), patch("homeassistant.components.alarmdecoder.config_flow.AdExt.close"):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], connection_settings
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_options_arm_flow(hass: HomeAssistant):
"""Test arm options flow."""
user_input = {
CONF_ALT_NIGHT_MODE: True,
CONF_AUTO_BYPASS: True,
CONF_CODE_ARM_REQUIRED: True,
}
entry = MockConfigEntry(domain=DOMAIN)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={"edit_selection": "Arming Settings"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "arm_settings"
with patch(
"homeassistant.components.alarmdecoder.async_setup_entry", return_value=True
):
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input=user_input,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert entry.options == {
OPTIONS_ARM: user_input,
OPTIONS_ZONES: DEFAULT_ZONE_OPTIONS,
}
async def test_options_zone_flow(hass: HomeAssistant):
"""Test options flow for adding/deleting zones."""
zone_number = "2"
zone_settings = {CONF_ZONE_NAME: "Front Entry", CONF_ZONE_TYPE: DEVICE_CLASS_WINDOW}
entry = MockConfigEntry(domain=DOMAIN)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={"edit_selection": "Zones"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_select"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_ZONE_NUMBER: zone_number},
)
with patch(
"homeassistant.components.alarmdecoder.async_setup_entry", return_value=True
):
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input=zone_settings,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert entry.options == {
OPTIONS_ARM: DEFAULT_ARM_OPTIONS,
OPTIONS_ZONES: {zone_number: zone_settings},
}
# Make sure zone can be removed...
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={"edit_selection": "Zones"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_select"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_ZONE_NUMBER: zone_number},
)
with patch(
"homeassistant.components.alarmdecoder.async_setup_entry", return_value=True
):
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert entry.options == {
OPTIONS_ARM: DEFAULT_ARM_OPTIONS,
OPTIONS_ZONES: {},
}
async def test_options_zone_flow_validation(hass: HomeAssistant):
"""Test input validation for zone options flow."""
zone_number = "2"
zone_settings = {CONF_ZONE_NAME: "Front Entry", CONF_ZONE_TYPE: DEVICE_CLASS_WINDOW}
entry = MockConfigEntry(domain=DOMAIN)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={"edit_selection": "Zones"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_select"
# Zone Number must be int
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_ZONE_NUMBER: "asd"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_select"
assert result["errors"] == {CONF_ZONE_NUMBER: "int"}
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_ZONE_NUMBER: zone_number},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
# CONF_RELAY_ADDR & CONF_RELAY_CHAN are inclusive
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_RELAY_ADDR: "1"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {"base": "relay_inclusive"}
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_RELAY_CHAN: "1"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {"base": "relay_inclusive"}
# CONF_RELAY_ADDR, CONF_RELAY_CHAN must be int
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_RELAY_ADDR: "abc", CONF_RELAY_CHAN: "abc"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {
CONF_RELAY_ADDR: "int",
CONF_RELAY_CHAN: "int",
}
# CONF_ZONE_LOOP depends on CONF_ZONE_RFID
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_ZONE_LOOP: "1"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {CONF_ZONE_LOOP: "loop_rfid"}
# CONF_ZONE_LOOP must be int
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_ZONE_RFID: "rfid123", CONF_ZONE_LOOP: "ab"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {CONF_ZONE_LOOP: "int"}
# CONF_ZONE_LOOP must be between [1,4]
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**zone_settings, CONF_ZONE_RFID: "rfid123", CONF_ZONE_LOOP: "5"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zone_details"
assert result["errors"] == {CONF_ZONE_LOOP: "loop_range"}
# All valid settings
with patch(
"homeassistant.components.alarmdecoder.async_setup_entry", return_value=True
):
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
**zone_settings,
CONF_ZONE_RFID: "rfid123",
CONF_ZONE_LOOP: "2",
CONF_RELAY_ADDR: "12",
CONF_RELAY_CHAN: "1",
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert entry.options == {
OPTIONS_ARM: DEFAULT_ARM_OPTIONS,
OPTIONS_ZONES: {
zone_number: {
**zone_settings,
CONF_ZONE_RFID: "rfid123",
CONF_ZONE_LOOP: 2,
CONF_RELAY_ADDR: 12,
CONF_RELAY_CHAN: 1,
}
},
}
@pytest.mark.parametrize(
"protocol,connection",
[
(
PROTOCOL_SOCKET,
{
CONF_HOST: "alarmdecoder123",
CONF_PORT: 10001,
},
),
(
PROTOCOL_SERIAL,
{
CONF_DEVICE_PATH: "/dev/ttyUSB123",
CONF_DEVICE_BAUD: 115000,
},
),
],
)
async def test_one_device_allowed(hass, protocol, connection):
"""Test that only one AlarmDecoder device is allowed."""
flow = config_flow.AlarmDecoderFlowHandler()
flow.hass = hass
MockConfigEntry(
domain=DOMAIN,
data=connection,
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PROTOCOL: protocol},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "protocol"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], connection
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
| apache-2.0 | 8,882,228,377,592,459,000 | -8,379,013,404,744,657,000 | 31.160465 | 88 | 0.623979 | false |
rsyvarth/simple-blog | lib/wtforms/ext/sqlalchemy/validators.py | 40 | 1225 | from __future__ import unicode_literals
import warnings
from wtforms import ValidationError
from sqlalchemy.orm.exc import NoResultFound
class Unique(object):
"""Checks field value unicity against specified table field.
:param get_session:
        A function that returns a SQLAlchemy Session.
:param model:
The model to check unicity against.
:param column:
The unique column.
:param message:
The error message.
"""
field_flags = ('unique', )
def __init__(self, get_session, model, column, message=None):
warnings.warn('The Unique validator will be removed in WTForms 1.1', DeprecationWarning)
self.get_session = get_session
self.model = model
self.column = column
self.message = message
def __call__(self, form, field):
try:
obj = self.get_session().query(self.model)\
.filter(self.column == field.data).one()
if not hasattr(form, '_obj') or not form._obj == obj:
if self.message is None:
self.message = field.gettext('Already exists.')
raise ValidationError(self.message)
except NoResultFound:
pass
| mit | -8,970,705,857,160,172,000 | -4,284,689,082,044,385,300 | 32.108108 | 96 | 0.614694 | false |
ygol/dotfiles | bin/.venv-ansible-venv/lib/python2.6/site-packages/pip/exceptions.py | 398 | 1086 | """Exceptions used throughout package"""
class PipError(Exception):
"""Base pip exception"""
class InstallationError(PipError):
"""General exception during installation"""
class UninstallationError(PipError):
"""General exception during uninstallation"""
class DistributionNotFound(InstallationError):
"""Raised when a distribution cannot be found to satisfy a requirement"""
class BestVersionAlreadyInstalled(PipError):
"""Raised when the most up-to-date version of a package is already
installed. """
class BadCommand(PipError):
"""Raised when virtualenv or a command is not found"""
class CommandError(PipError):
"""Raised when there is an error in command-line arguments"""
class PreviousBuildDirError(PipError):
"""Raised when there's a previous conflicting build directory"""
class HashMismatch(InstallationError):
"""Distribution file hash values don't match."""
class InvalidWheelFilename(InstallationError):
"""Invalid wheel filename."""
class UnsupportedWheel(InstallationError):
"""Unsupported wheel."""
| mit | -1,589,613,400,494,358,800 | 6,580,668,708,235,068,000 | 22.608696 | 77 | 0.739411 | false |
taschik/ramcloud-load-manager | scripts/config.py | 2 | 3706 | #!/usr/bin/env python
# Copyright (c) 2011 Stanford University
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR(S) DISCLAIM ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL AUTHORS BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
This module defines a collection of variables that specify site-specific
configuration information such as names of RAMCloud hosts and the location
of RAMCloud binaries. This should be the only file you have to modify to
run RAMCloud scripts at your site.
"""
from common import captureSh
import os
import re
import subprocess
import sys
__all__ = ['coordinator_port', 'default_disk1','default_disk2', 'git_branch',
'hosts', 'obj_dir', 'obj_path', 'scripts_path', 'second_backup_port',
'server_port', 'top_path']
# git_branch is the name of the current git branch, which is used
# for purposes such as computing objDir.
try:
git_branch = re.search('^refs/heads/(.*)$',
captureSh('git symbolic-ref -q HEAD 2>/dev/null'))
except subprocess.CalledProcessError:
git_branch = None
obj_dir = 'obj'
else:
git_branch = git_branch.group(1)
obj_dir = 'obj.%s' % git_branch
# obj_dir is the name of the directory containing binaries for the current
# git branch (it's just a single name such as "obj.master", not a full path)
if git_branch == None:
obj_dir = 'obj'
else:
obj_dir = 'obj.%s' % git_branch
# The full path name of the directory containing this script file.
scripts_path = os.path.dirname(os.path.abspath(__file__))
# The full pathname of the parent of scriptsPath (the top-level directory
# of a RAMCloud source tree).
top_path = os.path.abspath(scripts_path + '/..')
# Add /usr/local/lib to LD_LIBRARY_PATH if it isn't already there (this was
# needed for CentOS 5.5, but should probably be deleted now).
try:
ld_library_path = os.environ['LD_LIBRARY_PATH'].split(':')
except KeyError:
ld_library_path = []
if '/usr/local/lib' not in ld_library_path:
ld_library_path.insert(0, '/usr/local/lib')
os.environ['LD_LIBRARY_PATH'] = ':'.join(ld_library_path)
# All of the hosts available for servers or clients; each entry
# consists of a name for the host (for ssh), an IP address
# to use for creating service locators, and an id for generating
# Ethernet addresses.
hosts = []
for i in range(1, 61):
hosts.append(('rc%02d' % i,
'192.168.1.%d' % (100 + i),
i))
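# For example, the first generated entry is ('rc01', '192.168.1.101', 1)
# and the last is ('rc60', '192.168.1.160', 60).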
# Host on which old master is run for running recoveries.
# Need not be a member of hosts
old_master_host = ('rcmaster', '192.168.1.1', 81)
# Full path to the directory containing RAMCloud executables.
obj_path = '%s/%s' % (top_path, obj_dir)
# Ports (for TCP, etc.) to use for each kind of server.
coordinator_port = 12246
server_port = 12247
second_backup_port = 12248
# Command-line argument specifying where the first backup on each
# server should store its segment replicas.
default_disk1 = '-f /dev/sda2'
# Command-line argument specifying where the second backup should
# store its segment replicas.
default_disk2 = '-f /dev/sdb2'
# Try to include local overrides.
try:
from localconfig import *
except:
pass
| isc | -2,455,987,876,924,469,000 | -8,767,187,747,149,635,000 | 34.634615 | 77 | 0.7102 | false |
eliezerfot123/django-with-angular | django_with_angular_je/django_with_angular_je/settings.py | 1 | 2686 | """
Django settings for django_with_angular_je project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '0q^+*z#p3in1od3$@4s_m4*#ohpo71454go_=%8na5dg6%uc33'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'django_with_angular_je.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_with_angular_je.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
| gpl-2.0 | 3,530,054,537,553,236,000 | 5,957,046,999,018,424,000 | 25.333333 | 71 | 0.693596 | false |
seckyn/jaikuengine | common/management/commands/clean.py | 35 | 1150 | # Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from optparse import make_option
from django.core.management.base import BaseCommand
import build
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'--skip-zip', action='store_true', dest='skip_zip', default=False,
help='Do not clean up zip files'
),
)
help = 'Cleans up the results of a build'
args = ''
requires_model_validation = False
def handle(self, *test_labels, **options):
skip_zip = options.get('skip_zip', False)
build.clean(skip_zip=skip_zip)
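  # Typical invocations (assuming the standard manage.py entry point):
  #   python manage.py clean            # full clean, including zip files
  #   python manage.py clean --skip-zip # leave zip files in place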
| apache-2.0 | -5,220,160,358,521,978,000 | 8,353,794,793,812,191,000 | 30.081081 | 76 | 0.710435 | false |
ubalance-team/magum | magum/magum.py | 1 | 26743 |
#!/usr/bin/python
"""
MAGUM python module (Beta 1.1.0)
MAGUM stands for Magnetometer, Accelerometer and Gyroscope Udoo Management.
It relies on modules such as smbus, time, os, sys and subprocess to manage
the UDOO Neo motion sensors over the I2C serial communication protocol.
Because the I2C device interface is opened R/W using the smbus module,
users of this module usually MUST have ROOT permissions.
"""
# including necessary modules
import smbus
import time
import sys
import os
import shlex
import subprocess
import re
import math
from array import *
from .utils import _dataConvertion
from .utils import _regsExample
from .regs import *
class Magum:
""" Magum(gScaleRange,fsDouble,aScaleRange,noise) -> Magum
Return a new Magum object that is (optionally)
automatically initialized with the default values.
"""
_i2cBus = smbus.SMBus(3) # open communication to I2C channel 4
_calibrated = False # check calibration
accScale = None
gyrScale = None
gyrDouble = None
# Complementary Filter Attributes
compAux = 0
_cFAngleX = 0
_cFAngleY = 0
_cFAngleZ = 0
compAux = 0
def __init__(self,gScaleRange=None,fsDouble=None,aScaleRange=None,noise=None):
self.killDrivers(1)
self._initAm(aScaleRange,noise)
self._initG(gScaleRange,fsDouble)
# accelerometer and magnetometer initialization
def _initAm(self,scaleRange=None,noise=None):
self.toStandby('a')
if noise == 1 and scaleRange in (2,4):
regNoise = 0x0c
elif noise in (0,None):
regNoise = 0x00
else:
			print 'Error: incorrect low noise value, it can be 1 (enabled) or 0 (disabled)'
sys.exit(1)
if scaleRange == 2:
self.setSensConf('a','A_XYZ_DATA_CFG',0x00) # set range to +/- 2g
elif scaleRange == 4:
self.setSensConf('a','A_XYZ_DATA_CFG',0x01) # set range to +/- 4g
elif scaleRange == 8:
self.setSensConf('a','A_XYZ_DATA_CFG',0x02) # set range to +/- 8g
elif scaleRange == None:
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1,0x01) # set to active mode
time.sleep(.300) # sleep 300 ms
else:
			print 'Error: incorrect aScaleRange value, read the documentation for the correct configuration.'
sys.exit(1)
self.accScale = scaleRange
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1,0x01 | regNoise) # set to active mode
time.sleep(.300) # sleep 300 ms
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,M_CTRL_REG1,0x03) # enable both accelerometer and magnetometer sensors
# gyroscope initialization
def _initG(self,scaleRange=None,fsDouble=None):
self.toStandby('g')
if fsDouble == 1:
self.gyrDouble = 2
self.setSensConf('g','G_CTRL_REG3',0x01)
elif fsDouble == 0:
self.gyrDouble = 1
self.setSensConf('g','G_CTRL_REG3',0x00)
else:
self.gyrDouble = 1
self.setSensConf('g','G_CTRL_REG3',0x00)
if scaleRange == 2000:
self.setSensConf('g','G_CTRL_REG0',0x00) # set range to +/- 2000dps (4000dps if CTRL_REG3 is set to double)
elif scaleRange == 1000:
self.setSensConf('g','G_CTRL_REG0',0x01) # set range to +/- 1000dps (2000dps if CTRL_REG3 is set to double)
elif scaleRange == 500:
self.setSensConf('g','G_CTRL_REG0',0x02) # set range to +/- 500dps (1000dps if CTRL_REG3 is set to double)
elif scaleRange == 250:
self.setSensConf('g','G_CTRL_REG0',0x03) # set range to +/- 250dps (500dps if CTRL_REG3 is set to double)
elif scaleRange == None:
self._i2cBus.write_byte_data(I2C_G_ADDRESS,A_CTRL_REG1,0x16) # set to active mode
time.sleep(.300) # sleep 300 ms
else:
			print 'Error: incorrect gScaleRange value, read the documentation for the correct configuration.'
sys.exit(1)
self.gyrScale = scaleRange
self._i2cBus.write_byte_data(I2C_G_ADDRESS,G_CTRL_REG1,0x16) # set to active mode
time.sleep(.300) # sleep 300ms
def toStandby(self,sensor):
if sensor in ('a','m'):
currReg = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1) # get current configuration
if currReg % 2 == 1:
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1,currReg - 1) # set to standby_mode
if sensor in ('g'):
currReg = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG1) # get old configuration
currReg = currReg >> 2
currReg = currReg << 2
self._i2cBus.write_byte_data(I2C_G_ADDRESS,G_CTRL_REG1,currReg) # set to standby_mode
time.sleep(.300) # sleep 300ms
def toActive(self,sensor):
if sensor in ('a','m'):
currReg = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1) # get current configuration
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1,currReg) # set to active_mode
if sensor in ('g'):
currReg = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG1) # get old configuration
currReg = currReg >> 2
currReg = currReg << 2
currReg = currReg + 2
self._i2cBus.write_byte_data(I2C_G_ADDRESS,G_CTRL_REG1,currReg) # set to active_mode
time.sleep(.300) # sleep 300ms
# enable/disable system drivers
def killDrivers(self,x):
proc1 = subprocess.Popen(shlex.split('lsmod'),stdout=subprocess.PIPE)
proc2 = subprocess.Popen(shlex.split('grep fxas2100x'),stdin=proc1.stdout,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
proc1.stdout.close() # Allow proc1 to receive a SIGPIPE if proc2 exits.
out1,err1=proc2.communicate()
proc1 = subprocess.Popen(shlex.split('lsmod'),stdout=subprocess.PIPE)
proc2 = subprocess.Popen(shlex.split('grep fxos8700'),stdin=proc1.stdout,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
proc1.stdout.close() # Allow proc1 to receive a SIGPIPE if proc2 exits.
out2,err2=proc2.communicate()
if x == 1:
if out1:
os.system('rmmod fxas2100x')
if out2:
os.system('rmmod fxos8700')
elif x == 0:
if not out1:
os.system('modprobe fxas2100x')
if not out2:
os.system('modprobe fxos8700')
else:
print "Error: wrong killDrivers(x) parameter.\n self.killDrivers(0): enable drivers \n killDrivers(1): disable drivers."
sys.exit(1)
# sensor calibration
def calibrateSens(self,samples):
acc_angle = array('i',[])
rate_gyr = array('i',[])
i = 0
sumX = 0
sumY = 0
sumZ = 0
gsumX = 0
gsumY = 0
gsumZ = 0
tarXvect = array('i',[])
tarYvect = array('i',[])
tarZvect = array('i',[])
gtarXvect = array('i',[])
gtarYvect = array('i',[])
gtarZvect = array('i',[])
gyrXangle = 0.0
gyrYangle = 0.0
gyrZangle = 0.0
accXangle = 0.0
accYangle = 0.0
accZangle = 0.0
axisOffset = array('i',[])
# sensors Calibration
raw_input("CAUTION! Sensors calibration.\nSet your udoo-neo in an horizontal position and press Enter Key...\n")
perc = -1
while i<samples:
acc_angle = self.readAData()
rate_gyr = self.readGData()
factor = self.accScale/2
if acc_angle[0] >= 32768:
tarXvect.insert(i,int(acc_angle[0]-65536))
else:
tarXvect.insert(i,int(acc_angle[0]))
if acc_angle[1] >= 32768:
tarYvect.insert(i,int(acc_angle[1]-65536))
else:
tarYvect.insert(i,int(acc_angle[1]))
if acc_angle[2] >= 32768:
tarZvect.insert(i,int(acc_angle[2] - 65536 + 16384/factor))
else:
tarZvect.insert(i,int(acc_angle[2] + 16384/factor))
if rate_gyr[0] >= 32768:
gtarXvect.insert(i,int(rate_gyr[0]-65536))
else:
gtarXvect.insert(i,int(rate_gyr[0]))
if rate_gyr[1] >= 32768:
gtarYvect.insert(i,int(rate_gyr[1]-65536))
else:
gtarYvect.insert(i,int(rate_gyr[1]))
if rate_gyr[2] >= 32768:
gtarZvect.insert(i,int(rate_gyr[2] - 65536))
else:
gtarZvect.insert(i,int(rate_gyr[2]))
sumX += tarXvect[i]
sumY += tarYvect[i]
sumZ += tarZvect[i]
gsumX += gtarXvect[i]
gsumY += gtarYvect[i]
gsumZ += gtarZvect[i]
loading = int((i*100)/samples)
if loading != perc:
print "Calibration percentage: " + str(int(loading)) + "%"
perc = loading
i += 1
print "Calibration percentage: 100%\n"
avgX = sumX/samples
avgY = sumY/samples
avgZ = sumZ/samples
gavgX = gsumX/samples
gavgY = gsumY/samples
gavgZ = gsumZ/samples
axisOffset.insert(0,avgX)
axisOffset.insert(1,avgY)
axisOffset.insert(2,avgZ)
axisOffset.insert(3,gavgX)
axisOffset.insert(4,gavgY)
axisOffset.insert(5,gavgZ)
self._calibrated = True
return axisOffset
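	# The returned array holds, in order, the accelerometer X/Y/Z offsets
	# (indices 0-2) and the gyroscope X/Y/Z offsets (indices 3-5) in raw
	# counts; compFilter() and kalmanFilter() expect this exact layout.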
# set sensors configurations
def setSensConf(self,sensor,reg,hexVal):
self.toStandby(sensor)
if sensor == 'a':
if reg in A_CREGS_LIST:
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,COMPLETE_REGS_DICT[reg],hexVal)
else:
_regsExample('a')
if sensor == 'm':
if reg in M_CREGS_LIST:
if bool(is_hex(str(hexVal))):
self._i2cBus.write_byte_data(I2C_AM_ADDRESS,COMPLETE_REGS_DICT[reg],hexVal)
else:
_regsExample('m')
if sensor == 'g':
if reg in G_CREG_LIST:
				# gyroscope registers live on the gyroscope's own I2C address
				self._i2cBus.write_byte_data(I2C_G_ADDRESS,COMPLETE_REGS_DICT[reg],hexVal)
else:
_regsExample('g')
time.sleep(.300) # sleep 300ms
self.toActive(sensor)
# read accelerometer data
def readAData(self,uM=None):
axisList = array('f',[])
		# build each 16-bit axis value by shifting the MSB left by 8 bits
		# and OR-ing in the LSB
xMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_X_MSB)
xLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_X_LSB)
xRaw = (xMsbRaw << 8 | xLsbRaw) # x axis
yMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_Y_MSB)
yLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_Y_LSB)
yRaw = (yMsbRaw << 8 | yLsbRaw) # y axis
zMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_Z_MSB)
zLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OUT_Z_LSB)
zRaw = (zMsbRaw << 8 | zLsbRaw) # z axis
axisList.insert(0,xRaw)
axisList.insert(1,yRaw)
axisList.insert(2,zRaw)
axisList = _dataConvertion(self._i2cBus,"a",axisList,uM)
return axisList
# read magnetometer data
def readMData(self,uM=None):
axisList = array('f',[])
		# build each 16-bit axis value by shifting the MSB left by 8 bits
		# and OR-ing in the LSB
xMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_X_MSB)
xLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_X_LSB)
xRaw = xMsbRaw << 8 | xLsbRaw # x axis
yMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_Y_MSB)
yLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_Y_LSB)
yRaw = yMsbRaw << 8 | yLsbRaw # y axis
zMsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_Z_MSB)
zLsbRaw = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,M_OUT_Z_LSB)
zRaw = zMsbRaw << 8 | zLsbRaw # z axis
axisList.insert(0,xRaw)
axisList.insert(1,yRaw)
axisList.insert(2,zRaw)
axisList = _dataConvertion(self._i2cBus,'m',axisList,uM)
return axisList
# read gyroscope data
def readGData(self,uM=None):
axisList = array('f',[])
		# build each 16-bit axis value by shifting the MSB left by 8 bits
		# and OR-ing in the LSB
xMsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_X_MSB)
xLsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_X_LSB)
xRaw = xMsbRaw << 8 | xLsbRaw # x axis
yMsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_Y_MSB)
yLsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_Y_LSB)
yRaw = yMsbRaw << 8 | yLsbRaw # y axis
zMsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_Z_MSB)
zLsbRaw = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_OUT_Z_LSB)
zRaw = zMsbRaw << 8 | zLsbRaw # z axis
axisList.insert(0,xRaw)
axisList.insert(1,yRaw)
axisList.insert(2,zRaw)
axisList = _dataConvertion(self._i2cBus,"g",axisList,uM)
return axisList
def readTData(self,uM=None):
tempRaw= self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_TEMP)
if tempRaw >= 128:
tempCels= float((tempRaw-256)*0.96)
else:
tempCels=float((tempRaw)*0.96)
if uM in (None, 'raw'):
return tempRaw
if uM == 'C':
return tempCels
if uM == 'K':
tempKelv= float(tempCels + 273.15)
return tempKelv
if uM == 'F':
tempFahr= float(float(tempCels * 1.8)+32)
return tempFahr
# complementary filter algorithm
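	# Per axis the filter combines both sources as
	#   angle = alpha * (angle + gyroRate * DT) + (1 - alpha) * accAngle
	# with alpha = DT / (DT + exTime): the gyroscope term tracks fast motion
	# while the accelerometer term slowly corrects long-term drift.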
def compFilter(self,DT,axisOffset):
exTime = 0.013 # execution time
if DT < exTime:
print "Error: DT is too small to sample the accelerometer and gyroscope data.\nDT must be greater than 0.013."
sys.exit(1)
else:
if self._calibrated == True:
highPass = DT / (DT + exTime)
rate_gyr = array('i',[])
acc_angle = array('i',[])
cFAngleAxis = array('f',[])
rate_gyr = self.readGData()
acc_angle = self.readAData()
factor = self.accScale/2
gFactor = float((self.gyrScale/(1000*32))*self.gyrDouble)
if acc_angle[0] >= 32768:
acc_angle[0] -= 65536
if acc_angle[1] >= 32768:
acc_angle[1] -= 65536
if acc_angle[2] >= 32768:
acc_angle[2] -= 65536
if rate_gyr[0] >= 32768:
rate_gyr[0] -= 65536
if rate_gyr[1] >= 32768:
rate_gyr[1] -= 65536
if rate_gyr[2] >= 32768:
rate_gyr[2] -= 65536
x = (((acc_angle[0] - axisOffset[0])/4) * 0.244 * factor)
y = (((acc_angle[1] - axisOffset[1])/4) * 0.244 * factor)
z = (((acc_angle[2] - axisOffset[2])/4) * 0.244 * factor)
x2 = x * x
y2 = y * y
z2 = z * z
accXangle = math.atan(x/math.sqrt(y2+z2))*(180/math.pi)
accYangle = math.atan(y/math.sqrt(x2+z2))*(180/math.pi)
accZangle = math.atan(z/math.sqrt(x2+y2))*(180/math.pi)
gyrXangle = float(((rate_gyr[0] - axisOffset[3]) * gFactor)/DT)
gyrYangle = float(((rate_gyr[1] - axisOffset[4]) * gFactor)/DT)
gyrZangle = float(((rate_gyr[2] - axisOffset[5]) * gFactor)/DT)
modGyr = (gyrXangle*gyrXangle) + (gyrYangle*gyrYangle) + (gyrZangle*gyrZangle)
# Seed the angles from the accelerometer on the first pass (or, in the commented-out
# variant, whenever the base is essentially not rotating)
#if self.compAux == 0 or (math.fabs(gyrXangle) <= 5 and math.fabs(gyrYangle) <= 5 and math.fabs(gyrZangle) <= 5):
if self.compAux == 0:
self._cFAngleX = float(accXangle)
self._cFAngleY = float(accYangle)
self._cFAngleZ = float(accZangle)
self.compAux = 1
else: # Then we use the Complementary Filter
self._cFAngleX = (highPass) * (self._cFAngleX + gyrXangle * DT) + (1-highPass)*(accXangle)
self._cFAngleY = (highPass) * (self._cFAngleY + gyrYangle * DT) + (1-highPass)*(accYangle)
self._cFAngleZ = (highPass) * (self._cFAngleZ + gyrZangle * DT) + (1-highPass)*(accZangle)
cFAngleAxis.insert(0,self._cFAngleX)
cFAngleAxis.insert(1,self._cFAngleY*(-1))
cFAngleAxis.insert(2,self._cFAngleZ*(-1))
gyrXangle = float((rate_gyr[0] - axisOffset[3]) * gFactor)
gyrYangle = float((rate_gyr[1] - axisOffset[4]) * gFactor)
gyrZangle = float((rate_gyr[2] - axisOffset[5]) * gFactor)
time.sleep(DT-exTime)
return cFAngleAxis
else:
print "Error: failed calibration.\nMake sure to calibrate the sensors using calibrateSens(sensor,samples)"
sys.exit(1)
# Kalman Filter
# Note: this algorithm is under development and may not behave exactly like a standard Kalman filter
# If you want to improve this algorithm join us on github at https://github.com/ubalance-team/magum
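# For reference, the single-axis form used below predicts angle += DT * (gyro_rate - bias)
# and corrects it with gain K = P / (P + R_angle) using the accelerometer angle; note that
# the covariance terms (AP_xx) are re-initialised on every call rather than kept as state.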
def kalmanFilter(self,DT,axis,axisOffset):
exTime = 0.012 # execution time
if DT < exTime:
print "Error: DT is too small to sample the accelerometer and gyroscope data.\nDT must be greater than 0.015."
sys.exit(1)
else:
if self._calibrated == True:
rate_gyr = self.readGData()
acc_angle = self.readAData()
factor = self.accScale/2
gFactor = float((self.gyrScale/(1000*32))*self.gyrDouble)
if acc_angle[0] >= 32768:
acc_angle[0] -= 65536
if acc_angle[1] >= 32768:
acc_angle[1] -= 65536
if acc_angle[2] >= 32768:
acc_angle[2] -= 65536
if rate_gyr[0] >= 32768:
rate_gyr[0] -= 65536
if rate_gyr[1] >= 32768:
rate_gyr[1] -= 65536
if rate_gyr[2] >= 32768:
rate_gyr[2] -= 65536
x = (((acc_angle[0] - axisOffset[0])/4) * 0.244 * factor)
y = (((acc_angle[1] - axisOffset[1])/4) * 0.244 * factor)
z = (((acc_angle[2] - axisOffset[2])/4) * 0.244 * factor)
x2 = x * x
y2 = y * y
z2 = z * z
if axis == 'x':
accAngle = math.atan(x/math.sqrt(y2+z2))*(180/math.pi)
gyroRate = float((rate_gyr[0] - axisOffset[3]) * gFactor)
elif axis == 'y':
accAngle = math.atan(y/math.sqrt(x2+z2))*(180/math.pi)*(-1)
gyroRate = float((rate_gyr[1] - axisOffset[4]) * gFactor)
elif axis == 'z':
accAngle = math.atan(z/math.sqrt(x2+y2))*(180/math.pi)*(-1)
gyroRate = float((rate_gyr[2] - axisOffset[5]) * gFactor)
Q_angle = 0.01
Q_gyro = 0.0003
R_angle = 0.01
a_bias = 0
AP_00 = 0
AP_01 = 0
AP_10 = 0
AP_11 = 0
KFangle = 0.0
KFangle += DT * (gyroRate - a_bias)
AP_00 += - DT * (AP_10 + AP_01) + Q_angle * DT
AP_01 += - DT * AP_11
AP_10 += - DT * AP_11
AP_11 += + Q_gyro * DT
a = accAngle - KFangle
S = AP_00 + R_angle
K_0 = AP_00 / S
K_1 = AP_10 / S
KFangle += K_0 * a
a_bias += K_1 * a
AP_00 -= K_0 * AP_00
AP_01 -= K_0 * AP_01
AP_10 -= K_1 * AP_00
AP_11 -= K_1 * AP_01
time.sleep(DT-exTime)
return KFangle*float(180/math.pi)*0.9
else:
print "Error: failed calibration.\nMake sure to calibrate the sensors using calibrateSens(sensor,samples)"
sys.exit(1)
# Implementation of Sebastian Madgwick's "...efficient orientation filter for... inertial/magnetic sensor arrays"
# (see http://www.x-io.co.uk/category/open-source/ for examples and more details)
# which fuses acceleration, rotation rate, and magnetic moments to produce a quaternion-based estimate of absolute
# device orientation
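# Inputs are [x, y, z] component arrays from readAData/readGData/readMData (gyro rates
# expected in rad/s); the return value is the quaternion [q1, q2, q3, q4]. Note that the
# quaternion state is re-initialised to identity here, so each call integrates only a
# single deltat step.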
def madgwickQuaternionFilter(self,aCompArray,gCompArray,mCompArray):
ax = aCompArray[0]
ay = aCompArray[1]
az = aCompArray[2]
mx = mCompArray[0]
my = mCompArray[1]
mz = mCompArray[2]
gx = gCompArray[0]
gy = gCompArray[1]
gz = gCompArray[2]
deltat = 0.001
gyroMeasError = math.pi * (5.0 / 180.0)
gyroMeasDrift = math.pi * (0.2 / 180.0)
beta = math.sqrt(3.0 / 4.0) * gyroMeasError
zeta = math.sqrt(3.0 / 4.0) * gyroMeasDrift
q = array('f',[])
q1 = 1.0
q2 = 0.0
q3 = 0.0
q4 = 0.0
norm = 0.0
hx = 0.0
hy = 0.0
_2bx = 0.0
_2bz = 0.0
s1 = 0.0
s2 = 0.0
s3 = 0.0
s4 = 0.0
qDot1 = 0.0
qDot2 = 0.0
qDot3 = 0.0
qDot4 = 0.0
# Auxiliary variables to avoid repeated arithmetic
_2q1mx = 0.0
_2q1my = 0.0
_2q1mz = 0.0
_2q2mx = 0.0
_4bx = 0.0
_4bz = 0.0
_2q1 = 2.0 * q1
_2q2 = 2.0 * q2
_2q3 = 2.0 * q3
_2q4 = 2.0 * q4
_2q1q3 = 2.0 * q1 * q3
_2q3q4 = 2.0 * q3 * q4
q1q1 = q1 * q1
q1q2 = q1 * q2
q1q3 = q1 * q3
q1q4 = q1 * q4
q2q2 = q2 * q2
q2q3 = q2 * q3
q2q4 = q2 * q4
q3q3 = q3 * q3
q3q4 = q3 * q4
q4q4 = q4 * q4
# Normalize accelerometer measurement
norm = math.sqrt(ax * ax + ay * ay + az * az)
if norm == 0.0: return # handle NaN
norm = 1.0/norm
ax *= norm
ay *= norm
az *= norm
# Normalize magnetometer measurement
norm = math.sqrt(mx * mx + my * my + mz * mz)
if norm == 0.0: return # handle NaN
norm = 1.0/norm
mx *= norm
my *= norm
mz *= norm
# Reference direction of Earth's magnetic field
_2q1mx = 2.0 * q1 * mx
_2q1my = 2.0 * q1 * my
_2q1mz = 2.0 * q1 * mz
_2q2mx = 2.0 * q2 * mx
hx = mx * q1q1 - _2q1my * q4 + _2q1mz * q3 + mx * q2q2 + _2q2 * my * q3 + _2q2 * mz * q4 - mx * q3q3 - mx * q4q4
hy = _2q1mx * q4 + my * q1q1 - _2q1mz * q2 + _2q2mx * q3 - my * q2q2 + my * q3q3 + _2q3 * mz * q4 - my * q4q4
_2bx = math.sqrt(hx * hx + hy * hy)
_2bz = -_2q1mx * q3 + _2q1my * q2 + mz * q1q1 + _2q2mx * q4 - mz * q2q2 + _2q3 * my * q4 - mz * q3q3 + mz * q4q4
_4bx = 2.0 * _2bx
_4bz = 2.0 * _2bz
# Gradient descent algorithm corrective step
s1 = -_2q3 * (2.0 * q2q4 - _2q1q3 - ax) + _2q2 * (2.0 * q1q2 + _2q3q4 - ay) - _2bz * q3 * (_2bx * (0.5 - q3q3 - q4q4) + _2bz * (q2q4 - q1q3) - mx) + (-_2bx * q4 + _2bz * q2) * (_2bx * (q2q3 - q1q4) + _2bz * (q1q2 + q3q4) - my) + _2bx * q3 * (_2bx * (q1q3 + q2q4) + _2bz * (0.5 - q2q2 - q3q3) - mz)
s2 = _2q4 * (2.0 * q2q4 - _2q1q3 - ax) + _2q1 * (2.0 * q1q2 + _2q3q4 - ay) - 4.0 * q2 * (1.0 - 2.0 * q2q2 - 2.0 * q3q3 - az) + _2bz * q4 * (_2bx * (0.5 - q3q3 - q4q4) + _2bz * (q2q4 - q1q3) - mx) + (_2bx * q3 + _2bz * q1) * (_2bx * (q2q3 - q1q4) + _2bz * (q1q2 + q3q4) - my) + (_2bx * q4 - _4bz * q2) * (_2bx * (q1q3 + q2q4) + _2bz * (0.5 - q2q2 - q3q3) - mz)
s3 = -_2q1 * (2.0 * q2q4 - _2q1q3 - ax) + _2q4 * (2.0 * q1q2 + _2q3q4 - ay) - 4.0 * q3 * (1.0 - 2.0 * q2q2 - 2.0 * q3q3 - az) + (-_4bx * q3 - _2bz * q1) * (_2bx * (0.5 - q3q3 - q4q4) + _2bz * (q2q4 - q1q3) - mx) + (_2bx * q2 + _2bz * q4) * (_2bx * (q2q3 - q1q4) + _2bz * (q1q2 + q3q4) - my) + (_2bx * q1 - _4bz * q3) * (_2bx * (q1q3 + q2q4) + _2bz * (0.5 - q2q2 - q3q3) - mz)
s4 = _2q2 * (2.0 * q2q4 - _2q1q3 - ax) + _2q3 * (2.0 * q1q2 + _2q3q4 - ay) + (-_4bx * q4 + _2bz * q2) * (_2bx * (0.5 - q3q3 - q4q4) + _2bz * (q2q4 - q1q3) - mx) + (-_2bx * q1 + _2bz * q3) * (_2bx * (q2q3 - q1q4) + _2bz * (q1q2 + q3q4) - my) + _2bx * q2 * (_2bx * (q1q3 + q2q4) + _2bz * (0.5 - q2q2 - q3q3) - mz)
norm = math.sqrt(s1 * s1 + s2 * s2 + s3 * s3 + s4 * s4) # normalize step magnitude
norm = 1.0/norm
s1 *= norm
s2 *= norm
s3 *= norm
s4 *= norm
# Compute rate of change of quaternion
qDot1 = 0.5 * (-q2 * gx - q3 * gy - q4 * gz) - beta * s1
qDot2 = 0.5 * (q1 * gx + q3 * gz - q4 * gy) - beta * s2
qDot3 = 0.5 * (q1 * gy - q2 * gz + q4 * gx) - beta * s3
qDot4 = 0.5 * (q1 * gz + q2 * gy - q3 * gx) - beta * s4
# Integrate to yield quaternion
q1 += qDot1 * deltat
q2 += qDot2 * deltat
q3 += qDot3 * deltat
q4 += qDot4 * deltat
norm = math.sqrt(q1 * q1 + q2 * q2 + q3 * q3 + q4 * q4) # normalize quaternion
norm = 1.0/norm
q.insert(0,q1 * norm)
q.insert(1,q2 * norm)
q.insert(2,q3 * norm)
q.insert(3,q4 * norm)
return q
# get current sensor configurations
def getCurrentConf(self,sensor,screen = None):
if sensor == 'a':
config = [None] * 28
_regName = ['A_TRIG_CFG','A_CTRL_REG1','A_CTRL_REG2','A_CTRL_REG3','A_CTRL_REG4','A_CTRL_REG5','A_ASPL_COUNT','A_F_SETUP','A_XYZ_DATA_CFG','A_HP_FILTER_CUTOFF','A_PL_CFG',
'A_PL_COUNT','A_PL_BF_ZCOMP','A_PL_THS_REG','A_FFMT_CFG','A_FFMT_THS','A_FFMT_COUNT','A_VECM_CFG','A_VECM_THS_MSB','A_TRANSIENT_CFG',
'A_TRANSIENT_THS','A_TRANSIENT_COUNT','A_PULSE_CFG','A_PULSE_TMLT','A_PULSE_LTCY','A_OFF_X','A_OFF_Y','A_OFF_Z']
config[0] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_TRIG_CFG)
config[1] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG1)
config[2] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG2)
config[3] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG3)
config[4] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG4)
config[5] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_CTRL_REG5)
config[6] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_ASPL_COUNT)
config[7] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_F_SETUP)
config[8] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_XYZ_DATA_CFG)
config[9] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_HP_FILTER_CUTOFF)
config[10] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PL_CFG)
config[11] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PL_COUNT)
config[12] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PL_BF_ZCOMP)
config[13] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PL_THS_REG)
config[14] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_FFMT_CFG)
config[15] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_FFMT_THS)
config[16] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_FFMT_COUNT)
config[17] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_VECM_CFG)
config[18] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_VECM_THS_MSB)
config[19] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_TRANSIENT_CFG)
config[20] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_TRANSIENT_THS)
config[21] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_TRANSIENT_COUNT)
config[22] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PULSE_CFG)
config[23] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PULSE_TMLT)
config[24] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_PULSE_LTCY)
config[25] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OFF_X)
config[26] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OFF_Y)
config[27] = self._i2cBus.read_byte_data(I2C_AM_ADDRESS,A_OFF_Z)
if sensor == 'm':
config = [None] * 15
_regName = ['M_OFF_X_MSB','M_OFF_X_LSB','M_OFF_Y_MSB','M_OFF_Y_LSB','M_OFF_Z_MSB','M_OFF_Z_LSB','M_THS_CFG','M_THS_COUNT',
'M_CTRL_REG1','M_CTRL_REG2','M_CTRL_REG3','M_VECM_CFG','M_VECM_THS_MSB','M_VECM_THS_LSB','M_VECM_CNT']
config[0] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_X_MSB)
config[1] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_X_LSB)
config[2] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_Y_MSB)
config[3] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_Y_LSB)
config[4] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_Z_MSB)
config[5] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_OFF_Z_LSB)
config[6] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_THS_CFG)
config[7] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_THS_COUNT)
config[8] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_CTRL_REG1)
config[9] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_CTRL_REG2)
config[10] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_CTRL_REG3)
config[11] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_VECM_CFG)
config[12] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_VECM_THS_MSB)
config[13] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_VECM_THS_LSB)
config[14] = self._i2cBus.read_byte_data (I2C_AM_ADDRESS,M_VECM_CNT)
if sensor == 'g':
config = [None] * 8
_regName = ['G_F_SETUP','G_CTRL_REG0','G_RT_CFG','G_RT_THS','G_RT_COUNT','G_CTRL_REG1','G_CTRL_REG2','G_CTRL_REG3']
config[0] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_F_SETUP)
config[1] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG0)
config[2] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_RT_CFG)
config[3] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_RT_THS)
config[4] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_RT_COUNT)
config[5] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG1)
config[6] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG2)
config[7] = self._i2cBus.read_byte_data(I2C_G_ADDRESS,G_CTRL_REG3)
if screen == 1:
for i,reg in enumerate(_regName):
print reg + ': ' + str('0x{:02x}'.format(config[i]))
return config
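# Minimal usage sketch (hypothetical; assumes `imu` is an instance of this class and
# that the sensors were calibrated first with calibrateSens(sensor, samples)):
#   imu.calibrateSens('a', 100)
#   print imu.readAData('raw'), imu.readTData('C')
#   print imu.compFilter(0.02, axisOffset)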
| gpl-2.0 | 1,654,278,061,756,991,200 | -5,602,724,171,338,765,000 | 33.067516 | 377 | 0.63239 | false |
astraw/stdeb | stdeb/command/install_deb.py | 1 | 1027 | import glob
import os
import stdeb.util as util
from distutils.core import Command
__all__ = ['install_deb']
class install_deb(Command):
description = 'distutils command to install debian binary package'
user_options = []
boolean_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
# generate .deb file
self.run_command('bdist_deb')
# get relevant options passed to sdist_dsc
sdist_dsc = self.get_finalized_command('sdist_dsc')
# collect the generated .deb files from the dist directory
target_debs = glob.glob(os.path.join(sdist_dsc.dist_dir, '*.deb'))
if len(target_debs) == 0:
raise ValueError('could not find .deb file')
for target_deb in target_debs:
# define system command to execute (install .deb binary pkg)
syscmd = ['dpkg', '--install', target_deb]
util.process_command(syscmd)
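# Typical invocation (assuming the usual stdeb command-packages hook; dpkg --install
# normally requires root):
#   python setup.py --command-packages=stdeb.command install_deb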
| mit | 8,861,496,087,419,735,000 | -1,446,858,515,464,073,700 | 25.333333 | 74 | 0.620253 | false |
tian2992/flask-sockets | flask_sockets.py | 2 | 1627 | # -*- coding: utf-8 -*-
def log_request(self):
log = self.server.log
if log:
if hasattr(log, 'info'):
log.info(self.format_request() + '\n')
else:
log.write(self.format_request() + '\n')
# Monkeys are made for freedom.
try:
import gevent
from geventwebsocket.gunicorn.workers import GeventWebSocketWorker as Worker
except ImportError:
pass
if 'gevent' in locals():
# Freedom-Patch logger for Gunicorn.
if hasattr(gevent, 'pywsgi'):
gevent.pywsgi.WSGIHandler.log_request = log_request
class SocketMiddleware(object):
def __init__(self, wsgi_app, socket):
self.ws = socket
self.app = wsgi_app
def __call__(self, environ, start_response):
path = environ['PATH_INFO']
if path in self.ws.url_map:
handler = self.ws.url_map[path]
environment = environ['wsgi.websocket']
handler(environment)
return []
else:
return self.app(environ, start_response)
class Sockets(object):
def __init__(self, app=None):
self.url_map = {}
if app:
self.init_app(app)
def init_app(self, app):
app.wsgi_app = SocketMiddleware(app.wsgi_app, self)
def route(self, rule, **options):
def decorator(f):
endpoint = options.pop('endpoint', None)
self.add_url_rule(rule, endpoint, f, **options)
return f
return decorator
def add_url_rule(self, rule, _, f, **options):
self.url_map[rule] = f
# CLI sugar.
if 'Worker' in locals():
worker = Worker
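# Usage sketch (route, module and app names are illustrative):
#   sockets = Sockets(app)
#
#   @sockets.route('/echo')
#   def echo_socket(ws):
#       while True:
#           ws.send(ws.receive())
#
# and serve it with the gevent-websocket worker exposed above, e.g.
#   gunicorn -k flask_sockets.worker module:app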
| mit | 3,638,036,130,038,145,500 | -5,262,286,900,439,776,000 | 22.926471 | 80 | 0.579594 | false |
rdipietro/tensorflow | tensorflow/g3doc/how_tos/adding_an_op/zero_out_1_test.py | 56 | 1441 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test for version 1 of the zero_out op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import tensorflow as tf
from tensorflow.g3doc.how_tos.adding_an_op import zero_out_op_1
class ZeroOut1Test(tf.test.TestCase):
def test(self):
with self.test_session():
result = zero_out_op_1.zero_out([5, 4, 3, 2, 1])
self.assertAllEqual(result.eval(), [5, 0, 0, 0, 0])
def testLoadTwice(self):
zero_out_loaded_again = tf.load_op_library(os.path.join(
tf.resource_loader.get_data_files_path(), 'zero_out_op_kernel_1.so'))
self.assertEqual(zero_out_loaded_again, zero_out_op_1._zero_out_module)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | -1,959,611,421,864,113,200 | -4,286,847,056,959,010,000 | 33.309524 | 80 | 0.674532 | false |
steedos/odoo | addons/calendar/__init__.py | 391 | 1038 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import calendar
import controllers
import contacts
| agpl-3.0 | 9,049,310,733,199,932,000 | -7,311,696,557,194,132,000 | 42.25 | 78 | 0.61368 | false |
yookoala/ibus-cangjie | src/engine.py | 1 | 16581 | # Copyright (c) 2012-2013 - The IBus Cangjie authors
#
# This file is part of ibus-cangjie, the IBus Cangjie input method engine.
#
# ibus-cangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ibus-cangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ibus-cangjie. If not, see <http://www.gnu.org/licenses/>.
__all__ = ["EngineCangjie", "EngineQuick"]
from operator import attrgetter
from gi.repository import GLib
from gi.repository import IBus
try:
import pycanberra
except ImportError:
# Too bad, the user won't get sound feedback on errors
pass
import cangjie
from .config import Config, properties
def is_inputnumber(keyval):
"""Is the `keyval` param a numeric input, e.g to select a candidate."""
return keyval in range(getattr(IBus, "0"), getattr(IBus, "9")+1)
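# getattr is needed because the digit keyvals cannot be written as attribute
# literals (IBus.0 would be a SyntaxError); the range covers the keyvals "0" to "9".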
class Engine(IBus.Engine):
"""The base class for Cangjie and Quick engines."""
def __init__(self):
super(Engine, self).__init__()
self.config = Config(IBus.Bus(), self.config_name,
self.on_value_changed)
self.current_input = ""
self.current_radicals = ""
self.clear_on_next_input = False
self.lookuptable = IBus.LookupTable()
self.lookuptable.set_page_size(9)
self.lookuptable.set_round(True)
self.lookuptable.set_orientation(IBus.Orientation.VERTICAL)
self.init_properties()
self.init_cangjie()
def init_properties(self):
self.prop_list = IBus.PropList()
for p in properties:
key = p["name"]
stored_value = self.config.read(key)
state = IBus.PropState.CHECKED if stored_value else IBus.PropState.UNCHECKED
try:
# Try the new constructor from IBus >= 1.5
prop = IBus.Property(key=key,
prop_type=IBus.PropType.TOGGLE,
label=p["label"],
icon='',
sensitive=True,
visible=True,
state=state,
sub_props=None)
except TypeError:
# IBus 1.4.x didn't have the GI overrides for the nice
# constructor, so let's do it the old, non-pythonic way.
# IBus.Property.new(key, type, label, icon, tooltip,
# sensitive, visible, state, sub_props)
prop = IBus.Property.new(key, IBus.PropType.TOGGLE,
IBus.Text.new_from_string(p["label"]),
'', IBus.Text.new_from_string(''),
True, True, state, None)
self.prop_list.append(prop)
def do_property_activate(self, prop_name, state):
active = state == IBus.PropState.CHECKED
self.config.write(prop_name, GLib.Variant("b", active))
def do_focus_in(self):
self.register_properties(self.prop_list)
def init_cangjie(self):
version = self.config.read("version").unpack()
version = getattr(cangjie.versions, "CANGJIE%d"%version)
filters = (cangjie.filters.BIG5 | cangjie.filters.HKSCS
| cangjie.filters.PUNCTUATION)
if self.config.read("include_allzh"):
filters |= cangjie.filters.CHINESE
if self.config.read("include_jp"):
filters |= cangjie.filters.KANJI
filters |= cangjie.filters.HIRAGANA
filters |= cangjie.filters.KATAKANA
if self.config.read("include_zhuyin"):
filters |= cangjie.filters.ZHUYIN
if self.config.read("include_symbols"):
filters |= cangjie.filters.SYMBOLS
self.cangjie = cangjie.Cangjie(version, filters)
def on_value_changed(self, config, section, name, value, data):
if section != self.config.config_section:
return
self.init_cangjie()
def do_focus_out (self):
"""Handle focus out event
This happens, for example, when switching between application windows
or input contexts.
Such events should clear the current input.
"""
self.clear_current_input()
def do_cancel_input(self):
"""Cancel the current input.
However, if there isn't any current input, then we shouldn't try to do
anything at all, so that the key can fulfill its original function.
"""
if not self.current_input:
return False
self.clear_current_input()
return True
def do_page_down(self):
"""Present the next page of candidates.
However, if there isn't any current input, then we shouldn't try to do
anything at all, so that the key can fulfill its original function.
"""
if not self.lookuptable.get_number_of_candidates():
return False
self.lookuptable.page_down()
self.update_lookup_table()
self.update_auxiliary_text()
return True
def do_page_up(self):
"""Present the previous page of candidates.
However, if there isn't any current input, then we shouldn't try to do
anything at all, so that the key can fulfill its original function.
"""
if not self.lookuptable.get_number_of_candidates():
return False
self.lookuptable.page_up()
self.update_lookup_table()
self.update_auxiliary_text()
return True
def do_backspace(self):
"""Go back from one input character.
This doesn't cancel the current input, only removes the last
user-inputted character from the current input, and clear the list of
candidates.
However, if there isn't any pre-edit, then we shouldn't handle the
backspace key at all, so that it can fulfill its original function:
deleting characters backwards.
"""
if not self.current_input:
return False
self.update_current_input(drop=1)
self.lookuptable.clear()
self.update_lookup_table()
return True
def do_space(self):
"""Handle the space key.
This is our "power key". It implements most of the behaviour behind
Cangjie and Quick.
It can be used to fetch the candidates if there are none, scroll to
the next page of candidates if appropriate or just commit the first
candidate when we have only one page.
Of course, it can also be used to input a "space" character.
"""
if not self.current_input:
return self.do_fullwidth_char(" ")
if not self.lookuptable.get_number_of_candidates():
try:
self.get_candidates()
except (cangjie.errors.CangjieNoCharsError,
cangjie.errors.CangjieInvalidInputError):
self.play_error_bell()
self.clear_on_next_input = True
return True
if self.lookuptable.get_number_of_candidates() <= 9:
self.do_select_candidate(1)
return True
self.do_page_down()
return True
def do_number(self, keyval):
"""Handle numeric input."""
if self.lookuptable.get_number_of_candidates():
return self.do_select_candidate(int(IBus.keyval_to_unicode(keyval)))
return self.do_fullwidth_char(IBus.keyval_to_unicode(keyval))
def do_other_key(self, keyval):
"""Handle all otherwise unhandled key presses."""
c = IBus.keyval_to_unicode(keyval)
if not c or c == '\n' or c == '\r':
return False
if not self.lookuptable.get_number_of_candidates() and \
self.current_input:
# FIXME: This is really ugly
if len(self.current_input) == 1 and \
not self.cangjie.is_input_key(self.current_input):
self.get_candidates(by_shortcode=True)
else:
self.get_candidates()
if self.lookuptable.get_number_of_candidates():
self.do_select_candidate(1)
return self.do_fullwidth_char(IBus.keyval_to_unicode(keyval))
def do_fullwidth_char(self, inputchar):
"""Commit the full-width version of an input character."""
if self.config.read("halfwidth_chars"):
return False
self.update_current_input(append=inputchar)
try:
self.get_candidates(code=inputchar, by_shortcode=True)
except cangjie.errors.CangjieNoCharsError:
self.clear_current_input()
return False
return True
def do_select_candidate(self, index):
"""Commit the selected candidate.
Parameter `index` is the number entered by the user corresponding to
the character she wishes to select on the current page.
Note: user-visible index starts at 1, but start at 0 in the lookup
table.
"""
page_index = self.lookuptable.get_cursor_pos()
selected = self.lookuptable.get_candidate(page_index+index-1)
self.commit_text(selected)
self.clear_current_input()
return True
def do_process_key_event(self, keyval, keycode, state):
"""Handle `process-key-event` events.
This event is fired when the user presses a key.
"""
# Ignore key release events
if (state & IBus.ModifierType.RELEASE_MASK):
return False
# Work around integer overflow bug on 32 bits systems:
# https://bugzilla.gnome.org/show_bug.cgi?id=693121
# The bug is fixed in pygobject 3.7.91, but many distributions will
# ship the previous version for some time. (e.g Fedora 18)
if (state & 1073741824):
return False
if state & (IBus.ModifierType.CONTROL_MASK |
IBus.ModifierType.MOD1_MASK):
# Ignore Alt+<key> and Ctrl+<key>
return False
if keyval == IBus.Escape:
return self.do_cancel_input()
if keyval == IBus.space:
return self.do_space()
if keyval == IBus.Page_Down:
return self.do_page_down()
if keyval == IBus.Page_Up:
return self.do_page_up()
if keyval == IBus.BackSpace:
return self.do_backspace()
if is_inputnumber(keyval):
return self.do_number(keyval)
c = IBus.keyval_to_unicode(keyval)
if c and c == "*":
return self.do_star()
if c and self.cangjie.is_input_key(c):
return self.do_inputchar(c)
return self.do_other_key(keyval)
def clear_current_input(self):
"""Clear the current input."""
self.current_input = ""
self.current_radicals = ""
self.clear_on_next_input = False
self.update_lookup_table()
self.update_auxiliary_text()
def update_current_input(self, append=None, drop=None):
"""Update the current input."""
if append is not None:
if self.clear_on_next_input:
self.clear_current_input()
if len(self.current_input) < self.input_max_len:
self.current_input += append
try:
self.current_radicals += self.cangjie.get_radical(append)
except cangjie.errors.CangjieInvalidInputError:
# That character doesn't have a radical
self.current_radicals += append
else:
self.play_error_bell()
elif drop is not None:
self.clear_on_next_input = False
self.current_input = self.current_input[:-drop]
self.current_radicals = self.current_radicals[:-drop]
else:
raise ValueError("You must specify either 'append' or 'drop'")
self.update_auxiliary_text()
def get_candidates(self, code=None, by_shortcode=False):
"""Get the candidates based on the user input.
If the optional `code` parameter is not specified, then use the
current input instead.
"""
self.lookuptable.clear()
num_candidates = 0
if not code:
code = self.current_input
if not by_shortcode:
chars = self.cangjie.get_characters(code)
else:
chars = self.cangjie.get_characters_by_shortcode(code)
for c in sorted(chars, key=attrgetter("frequency"), reverse=True):
self.lookuptable.append_candidate(IBus.Text.new_from_string(c.chchar))
num_candidates += 1
if num_candidates == 1:
self.do_select_candidate(1)
else:
# More than one candidate, display them
self.update_lookup_table()
def update_preedit_text(self):
"""Update the preedit text.
This is never used with Cangjie and Quick, so let's nullify it
completely, in case something else in the IBus machinery calls it.
"""
pass
def update_auxiliary_text(self):
"""Update the auxiliary text.
This should contain the radicals for the current input.
"""
text = IBus.Text.new_from_string(self.current_radicals)
super(Engine, self).update_auxiliary_text(text, len(self.current_radicals)>0)
def update_lookup_table(self):
"""Update the lookup table."""
if not self.current_input:
self.lookuptable.clear()
num_candidates = self.lookuptable.get_number_of_candidates()
super(Engine, self).update_lookup_table(self.lookuptable,
num_candidates>0)
def play_error_bell(self):
"""Play an error sound, to notify the user of invalid input."""
try:
if not hasattr(self, "canberra"):
self.canberra = pycanberra.Canberra()
self.canberra.play(1, pycanberra.CA_PROP_EVENT_ID, "dialog-error",
pycanberra.CA_PROP_MEDIA_ROLE, "error", None)
except:
# Playing a sound is a nice indication for the user, but if it
# fails, it should never take down the input system
pass
class EngineCangjie(Engine):
"""The Cangjie engine."""
__gtype_name__ = "EngineCangjie"
config_name = "cangjie"
input_max_len = 5
def do_inputchar(self, inputchar):
"""Handle user input of valid Cangjie input characters."""
if self.lookuptable.get_number_of_candidates():
self.do_select_candidate(1)
self.update_current_input(append=inputchar)
return True
def do_star(self):
"""Handle the star key (*)
For Cangjie, this can in some cases be a wildcard key.
"""
if self.current_input:
return self.do_inputchar("*")
return self.do_other_key(IBus.asterisk)
class EngineQuick(Engine):
"""The Quick engine."""
__gtype_name__ = "EngineQuick"
config_name = "quick"
input_max_len = 2
def do_inputchar(self, inputchar):
"""Handle user input of valid Cangjie input characters."""
if self.lookuptable.get_number_of_candidates():
self.do_select_candidate(1)
if len(self.current_input) < self.input_max_len:
self.update_current_input(append=inputchar)
# Now that we appended/committed, let's check the new length
if len(self.current_input) == self.input_max_len:
current_input = "*".join(self.current_input)
try:
self.get_candidates(current_input)
except cangjie.errors.CangjieNoCharsError:
self.play_error_bell()
self.clear_on_next_input = True
return True
def do_star(self):
"""Handle the star key (*)
For Quick, this should just be considered as any other key.
"""
return self.do_other_key(IBus.asterisk)
| gpl-3.0 | 1,228,505,371,886,392,300 | 6,296,412,423,966,518,000 | 31.89881 | 88 | 0.58947 | false |
TedaLIEz/sentry | src/sentry/migrations/0042_auto__add_projectcountbyminute__add_unique_projectcountbyminute_projec.py | 36 | 17255 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ProjectCountByMinute'
db.create_table('sentry_projectcountbyminute', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('project', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Project'], null=True)),
('date', self.gf('django.db.models.fields.DateTimeField')()),
('times_seen', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
('time_spent_total', self.gf('django.db.models.fields.FloatField')(default=0)),
('time_spent_count', self.gf('django.db.models.fields.IntegerField')(default=0)),
))
db.send_create_signal('sentry', ['ProjectCountByMinute'])
# Adding unique constraint on 'ProjectCountByMinute', fields ['project', 'date']
db.create_unique('sentry_projectcountbyminute', ['project_id', 'date'])
def backwards(self, orm):
# Removing unique constraint on 'ProjectCountByMinute', fields ['project', 'date']
db.delete_unique('sentry_projectcountbyminute', ['project_id', 'date'])
# Deleting model 'ProjectCountByMinute'
db.delete_table('sentry_projectcountbyminute')
models = {
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'sentry.filtervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'FilterValue'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'logger', 'culprit', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'views': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.View']", 'symmetrical': 'False', 'blank': 'True'})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.messagecountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'MessageCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.messagefiltervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'MessageFilterValue'},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.messageindex': {
'Meta': {'unique_together': "(('column', 'value', 'object_id'),)", 'object_name': 'MessageIndex'},
'column': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.project': {
'Meta': {'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': "orm['sentry.User']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectcountbyminute': {
'Meta': {'unique_together': "(('project', 'date'),)", 'object_name': 'ProjectCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.projectdomain': {
'Meta': {'unique_together': "(('project', 'domain'),)", 'object_name': 'ProjectDomain'},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'domain_set'", 'to': "orm['sentry.Project']"})
},
'sentry.projectmember': {
'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'ProjectMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_project_set'", 'to': "orm['sentry.User']"})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.searchdocument': {
'Meta': {'unique_together': "(('project', 'group'),)", 'object_name': 'SearchDocument'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_changed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'sentry.searchtoken': {
'Meta': {'unique_together': "(('document', 'field', 'token'),)", 'object_name': 'SearchToken'},
'document': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'token_set'", 'to': "orm['sentry.SearchDocument']"}),
'field': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.view': {
'Meta': {'object_name': 'View'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'verbose_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'verbose_name_plural': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'})
}
}
complete_apps = ['sentry']
| bsd-3-clause | -4,793,655,800,550,319,000 | -7,937,879,889,749,120,000 | 78.884259 | 167 | 0.561518 | false |
VPAC/pytsm | pytsm/actions/__init__.py | 1 | 1692 | # Copyright 2012-2014 VPAC
#
# This file is part of pytsm.
#
# pytsm is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pytsm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pytsm. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import importlib
import sys
import codecs
import locale
def load_command(name):
assert not name.startswith("_")
assert name.find(".") == -1
mod = importlib.import_module('pytsm.actions.commands.' + name)
return mod.Command
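# e.g. load_command("help") imports pytsm.actions.commands.help and returns its
# Command class; command modules are assumed to follow that layout.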
def command_line(argv=None):
if sys.version_info < (3, 0):
# for python2 we need to use correct encoding when writing to stdout
encoding = locale.getpreferredencoding()
Writer = codecs.getwriter(encoding)
sys.stdout = Writer(sys.stdout)
if argv is None:
argv = sys.argv
try:
command = argv[1]
except IndexError:
command = "help"
args = argv[2:]
try:
klass = load_command(command)
except ImportError:
print("Unknown command %s." % command, file=sys.stderr)
return 255
obj = klass()
rc = obj.execute(argv[0], command, args)
return rc
| gpl-3.0 | 8,343,107,677,832,366,000 | 718,850,237,879,107,600 | 27.677966 | 76 | 0.685579 | false |
DataCanvasIO/example-modules | modules/modeling/basic/linear_svc_estimator/main.py | 2 | 1630 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
from specparser import get_settings_from_file
from pprint import pprint
import csv
from sklearn.svm import LinearSVC
import numpy as np
from sklearn.externals import joblib
import matplotlib
matplotlib.use('Agg')
import datetime
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt
def drawPrecisionRecall(X,Y,output_file):
pdf = PdfPages(output_file)
plt.figure(figsize=(len(Y), len(X)))
plt.plot(Y, X, 'r-o')
plt.title('Precision/Recall')
pdf.savefig() # saves the current figure into a pdf page
plt.close()
pdf.close()
def readcolumn(filename):
column = []
with open(filename,"r") as fconcl:
for line in fconcl:
column.append(line.rstrip('\n'))
return column
def main():
settings = get_settings_from_file("spec.json")
print(settings)
X = np.genfromtxt(settings.Input.X, delimiter=',', skip_header=1)
svc = joblib.load(settings.Input.MODEL)
Y_out = svc.predict(X)
Y_list = [Y_out]
np.savetxt("./conclusion.csv", Y_out, fmt="%d", delimiter=",")
conclusion = readcolumn("./conclusion.csv")
label = readcolumn(settings.Input.Y)
precision_list = []
recall_list = []
hits = 0
for i in range(len(label)):
if conclusion[i] == label[i]:
hits+=1
precision_list.append(1.0*hits/(i+1))
recall_list.append(1.0*hits/(len(label)))
drawPrecisionRecall(precision_list,recall_list,settings.Output.report)
print("Done")
if __name__ == "__main__":
main()
| bsd-3-clause | -1,574,786,355,497,550,800 | -5,545,647,148,892,049,000 | 25.290323 | 74 | 0.642331 | false |
pablo-co/insight-jobs | process_stops.py | 1 | 3206 | import sys
import traceback
from bhulan.processVehicles import importTrucks, initCompute
from bhulan.processStops import saveComputedStops
from bhulan.util import notify, getTime
from pymongo import MongoClient
from bhulan.constants import WATTS_DATA_DB_KEY
from bhulan.inputOutput import saveStopsToFile
import numpy as np
from bhulan.merger import merger
import getopt
import warnings
import string
import random
db = WATTS_DATA_DB_KEY
def trucks(filename):
importTrucks(filename=filename)
def compute():
initCompute()
def stops():
saveComputedStops()
return 0
def run(func, args):
messages = {
trucks: "import trucks ",
compute: "compute truck dates and centers ",
stops: "compute stops and properties"
}
message = messages[func]
try:
getTime(func, message, *args)
# func(*args)
# notify(message)
except:
print traceback.format_exc()
notify(message + "failed")
def setupAll(input_file_name):
try:
run(trucks, [input_file_name])
run(compute, [])
run(stops, [])
notify("complete setup succeeded!")
except:
print traceback.format_exc()
notify("complete setup failed...")
##
# deletes the database and cleans up the collections
def dataPurge(db):
client = MongoClient()
client.drop_database(db)
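# Usage sketch: dataPurge(db) drops the whole working database; MongoClient() with no
# arguments assumes the default mongod on localhost:27017.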
def main(argv):
input_file_name = "input.csv"
output_file_name = "output.csv"
existing_file_name = "existing.csv"
non_existing_file_name = "non_existing.csv"
hash_name = ''.join(random.choice(string.ascii_uppercase) for i in range(24))
try:
opts, args = getopt.getopt(argv, "i:o:e:n:", ["input=", "output=", "existing=", "non_existing="])
except getopt.GetoptError:
sys.exit(2)
for opt, arg in opts:
if opt in ("-i", "--input"):
input_file_name = arg
elif opt in ("-e", "--existing"):
existing_file_name = arg
elif opt in ("-n", "--non_existing"):
non_existing_file_name = arg
elif opt in ("-o", "--output"):
output_file_name = arg
dataPurge(db)
setupAll(input_file_name)
run(trucks, [input_file_name])
run(stops, [])
run(compute, [])
with warnings.catch_warnings():
warnings.simplefilter("ignore")
exc = np.array([])
try:
exc = np.genfromtxt(non_existing_file_name, dtype=None, delimiter=',')
except:
print 'Non existing file is empty or missing'
exist = np.genfromtxt(existing_file_name, dtype=None, delimiter=',')
i = exist.min()
while i < (exist.max() + 1):
if i not in exc:
saveStopsToFile(hash_name, i)
i += 1
# saveStopsToFile(216)
# if len(sys.argv) == 2:
# if sys.argv[1] == "all":
# getTime(setupAll, "Ran complete setup")
# if sys.argv[1] == "trucks":
# run(trucks, [])
# if sys.argv[1] == "stops":
# run(stops, [])
# if sys.argv[1] == "compute":
# run(compute, [])
merger(existing_file_name, output_file_name, hash_name)
if __name__ == "__main__":
main(sys.argv[1:])
| mit | -4,975,221,968,337,602,000 | 5,889,062,815,195,605,000 | 24.046875 | 105 | 0.591391 | false |
Matty-Downing2169/opencamlib | scripts/batchdropcutter_test_2.py | 7 | 2142 | import ocl
import pyocl
import camvtk
import time
import vtk
import datetime
import math
if __name__ == "__main__":
print ocl.revision()
myscreen = camvtk.VTKScreen()
stl = camvtk.STLSurf("../stl/gnu_tux_mod.stl")
#stl = camvtk.STLSurf("../stl/beet_mm.stl")
#stl = camvtk.STLSurf("../stl/Blade.stl")
myscreen.addActor(stl)
stl.SetWireframe()
stl.SetColor((0.5,0.5,0.5))
polydata = stl.src.GetOutput()
s = ocl.STLSurf()
camvtk.vtkPolyData2OCLSTL(polydata, s)
print "STL surface read ", s.size(), " triangles"
length=5
cutter = ocl.BallCutter(1.4321, length)
#cutter = ocl.CylCutter(1.123, length)
#cutter = ocl.BullCutter(1.4123, 0.5, length)
#cutter = ocl.ConeCutter(0.43, math.pi/7, length)
print cutter
minx=0
dx=0.06
maxx=9
miny=0
dy=1
maxy=12
z=-5
clpoints = pyocl.CLPointGrid(minx,dx,maxx,miny,dy,maxy,z)
print "generated grid with", len(clpoints)," CL-points"
# batchdropcutter
bdc1 = ocl.BatchDropCutter()
bdc1.setSTL(s)
bdc1.setCutter(cutter)
for p in clpoints:
bdc1.appendPoint(p)
t_before = time.time()
bdc1.run()
t_after = time.time()
calctime = t_after-t_before
print " done in ", calctime," s"
clpts = bdc1.getCLPoints()
print "rendering...",
camvtk.drawCLPointCloud(myscreen, clpts)
print "done"
myscreen.camera.SetPosition(3, 23, 15)
myscreen.camera.SetFocalPoint(4, 5, 0)
t = camvtk.Text()
t.SetText("OpenCAMLib")
t.SetPos( (myscreen.width-200, myscreen.height-30) )
myscreen.addActor( t)
t2 = camvtk.Text()
stltext = "%i triangles\n%i CL-points\n%0.1f seconds" % (s.size(), len(clpts), calctime)
t2.SetText(stltext)
t2.SetPos( (50, myscreen.height-200) )
myscreen.addActor( t2)
t3 = camvtk.Text()
ctext = "Cutter: %s" % ( str(cutter) )
t3.SetText(ctext)
t3.SetPos( (50, myscreen.height-250) )
myscreen.addActor( t3)
myscreen.render()
myscreen.iren.Start()
raw_input("Press Enter to terminate")
| gpl-3.0 | 7,755,219,758,721,243,000 | -3,707,975,991,533,805,600 | 25.121951 | 92 | 0.607843 | false |
spisneha25/django | django/utils/dateformat.py | 365 | 10712 | """
PHP date() style date formatting
See http://www.php.net/date for format strings
Usage:
>>> import datetime
>>> d = datetime.datetime.now()
>>> df = DateFormat(d)
>>> print(df.format('jS F Y H:i'))
7th October 2003 11:39
>>>
"""
from __future__ import unicode_literals
import calendar
import datetime
import re
import time
from django.utils import six
from django.utils.dates import (
MONTHS, MONTHS_3, MONTHS_ALT, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR,
)
from django.utils.encoding import force_text
from django.utils.timezone import get_default_timezone, is_aware, is_naive
from django.utils.translation import ugettext as _
re_formatchars = re.compile(r'(?<!\\)([aAbBcdDeEfFgGhHiIjlLmMnNoOPrsStTUuwWyYzZ])')
re_escaped = re.compile(r'\\(.)')
class Formatter(object):
def format(self, formatstr):
pieces = []
for i, piece in enumerate(re_formatchars.split(force_text(formatstr))):
if i % 2:
pieces.append(force_text(getattr(self, piece)()))
elif piece:
pieces.append(re_escaped.sub(r'\1', piece))
return ''.join(pieces)
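# For example, format('jS F Y') splits into ['', 'j', '', 'S', ' ', 'F', ' ', 'Y', ''];
# the odd-indexed pieces dispatch to the j()/S()/F()/Y() methods below, while the
# even-indexed literal pieces (with any backslash escapes removed) pass through unchanged.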
class TimeFormat(Formatter):
def __init__(self, obj):
self.data = obj
self.timezone = None
# We only support timezone when formatting datetime objects,
# not date objects (timezone information not appropriate),
# or time objects (against established django policy).
if isinstance(obj, datetime.datetime):
if is_naive(obj):
self.timezone = get_default_timezone()
else:
self.timezone = obj.tzinfo
def a(self):
"'a.m.' or 'p.m.'"
if self.data.hour > 11:
return _('p.m.')
return _('a.m.')
def A(self):
"'AM' or 'PM'"
if self.data.hour > 11:
return _('PM')
return _('AM')
def B(self):
"Swatch Internet time"
raise NotImplementedError('may be implemented in a future release')
def e(self):
"""
Timezone name.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
try:
if hasattr(self.data, 'tzinfo') and self.data.tzinfo:
# Have to use tzinfo.tzname and not datetime.tzname
# because datatime.tzname does not expect Unicode
return self.data.tzinfo.tzname(self.data) or ""
except NotImplementedError:
pass
return ""
def f(self):
"""
Time, in 12-hour hours and minutes, with minutes left off if they're
zero.
Examples: '1', '1:30', '2:05', '2'
Proprietary extension.
"""
if self.data.minute == 0:
return self.g()
return '%s:%s' % (self.g(), self.i())
def g(self):
"Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
if self.data.hour == 0:
return 12
if self.data.hour > 12:
return self.data.hour - 12
return self.data.hour
def G(self):
"Hour, 24-hour format without leading zeros; i.e. '0' to '23'"
return self.data.hour
def h(self):
"Hour, 12-hour format; i.e. '01' to '12'"
return '%02d' % self.g()
def H(self):
"Hour, 24-hour format; i.e. '00' to '23'"
return '%02d' % self.G()
def i(self):
"Minutes; i.e. '00' to '59'"
return '%02d' % self.data.minute
def O(self):
"""
Difference to Greenwich time in hours; e.g. '+0200', '-0430'.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
seconds = self.Z()
sign = '-' if seconds < 0 else '+'
seconds = abs(seconds)
return "%s%02d%02d" % (sign, seconds // 3600, (seconds // 60) % 60)
def P(self):
"""
Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off
if they're zero and the strings 'midnight' and 'noon' if appropriate.
Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.'
Proprietary extension.
"""
if self.data.minute == 0 and self.data.hour == 0:
return _('midnight')
if self.data.minute == 0 and self.data.hour == 12:
return _('noon')
return '%s %s' % (self.f(), self.a())
def s(self):
"Seconds; i.e. '00' to '59'"
return '%02d' % self.data.second
def T(self):
"""
Time zone of this machine; e.g. 'EST' or 'MDT'.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
name = self.timezone.tzname(self.data) if self.timezone else None
if name is None:
name = self.format('O')
return six.text_type(name)
def u(self):
"Microseconds; i.e. '000000' to '999999'"
return '%06d' % self.data.microsecond
def Z(self):
"""
Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for
timezones west of UTC is always negative, and for those east of UTC is
always positive.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
offset = self.timezone.utcoffset(self.data)
# `offset` is a datetime.timedelta. For negative values (to the west of
# UTC) only days can be negative (days=-1) and seconds are always
# positive. e.g. UTC-1 -> timedelta(days=-1, seconds=82800, microseconds=0)
# Positive offsets have days=0
return offset.days * 86400 + offset.seconds
class DateFormat(TimeFormat):
year_days = [None, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
def b(self):
"Month, textual, 3 letters, lowercase; e.g. 'jan'"
return MONTHS_3[self.data.month]
def c(self):
"""
ISO 8601 Format
Example : '2008-01-02T10:30:00.000123'
"""
return self.data.isoformat()
def d(self):
"Day of the month, 2 digits with leading zeros; i.e. '01' to '31'"
return '%02d' % self.data.day
def D(self):
"Day of the week, textual, 3 letters; e.g. 'Fri'"
return WEEKDAYS_ABBR[self.data.weekday()]
def E(self):
"Alternative month names as required by some locales. Proprietary extension."
return MONTHS_ALT[self.data.month]
def F(self):
"Month, textual, long; e.g. 'January'"
return MONTHS[self.data.month]
def I(self):
"'1' if Daylight Savings Time, '0' otherwise."
if self.timezone and self.timezone.dst(self.data):
return '1'
else:
return '0'
def j(self):
"Day of the month without leading zeros; i.e. '1' to '31'"
return self.data.day
def l(self):
"Day of the week, textual, long; e.g. 'Friday'"
return WEEKDAYS[self.data.weekday()]
def L(self):
"Boolean for whether it is a leap year; i.e. True or False"
return calendar.isleap(self.data.year)
def m(self):
"Month; i.e. '01' to '12'"
return '%02d' % self.data.month
def M(self):
"Month, textual, 3 letters; e.g. 'Jan'"
return MONTHS_3[self.data.month].title()
def n(self):
"Month without leading zeros; i.e. '1' to '12'"
return self.data.month
def N(self):
"Month abbreviation in Associated Press style. Proprietary extension."
return MONTHS_AP[self.data.month]
def o(self):
"ISO 8601 year number matching the ISO week number (W)"
return self.data.isocalendar()[0]
def r(self):
"RFC 2822 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'"
return self.format('D, j M Y H:i:s O')
def S(self):
"English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'"
if self.data.day in (11, 12, 13): # Special case
return 'th'
last = self.data.day % 10
if last == 1:
return 'st'
if last == 2:
return 'nd'
if last == 3:
return 'rd'
return 'th'
def t(self):
"Number of days in the given month; i.e. '28' to '31'"
return '%02d' % calendar.monthrange(self.data.year, self.data.month)[1]
def U(self):
"Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)"
if isinstance(self.data, datetime.datetime) and is_aware(self.data):
return int(calendar.timegm(self.data.utctimetuple()))
else:
return int(time.mktime(self.data.timetuple()))
def w(self):
"Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)"
return (self.data.weekday() + 1) % 7
def W(self):
"ISO-8601 week number of year, weeks starting on Monday"
# Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt
week_number = None
jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1
weekday = self.data.weekday() + 1
day_of_year = self.z()
if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4:
if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year - 1)):
week_number = 53
else:
week_number = 52
else:
if calendar.isleap(self.data.year):
i = 366
else:
i = 365
if (i - day_of_year) < (4 - weekday):
week_number = 1
else:
j = day_of_year + (7 - weekday) + (jan1_weekday - 1)
week_number = j // 7
if jan1_weekday > 4:
week_number -= 1
return week_number
def y(self):
"Year, 2 digits; e.g. '99'"
return six.text_type(self.data.year)[2:]
def Y(self):
"Year, 4 digits; e.g. '1999'"
return self.data.year
def z(self):
"Day of the year; i.e. '0' to '365'"
doy = self.year_days[self.data.month] + self.data.day
if self.L() and self.data.month > 2:
doy += 1
return doy
def format(value, format_string):
"Convenience function"
df = DateFormat(value)
return df.format(format_string)
def time_format(value, format_string):
"Convenience function"
tf = TimeFormat(value)
return tf.format(format_string)
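# --- Illustrative usage sketch (not part of the original module) ---
# A hedged, minimal demonstration of the convenience functions above.
# Configuring settings with USE_I18N=False is an assumption made only so
# this sketch can run standalone; in a real project settings already exist.
if __name__ == '__main__':
    from django.conf import settings as _demo_settings
    if not _demo_settings.configured:
        _demo_settings.configure(USE_I18N=False, USE_TZ=False)
    _d = datetime.date(2003, 10, 7)
    _t = datetime.time(11, 39)
    print(format(_d, 'jS F Y'))    # expected: "7th October 2003"
    print(time_format(_t, 'H:i'))  # expected: "11:39"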
| bsd-3-clause | -4,649,519,650,988,787,000 | -7,366,802,836,909,959,000 | 29.518519 | 102 | 0.555172 | false |
mfherbst/spack | var/spack/repos/builtin/packages/bwa/package.py | 2 | 2458 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Bwa(Package):
"""Burrow-Wheeler Aligner for pairwise alignment between DNA sequences."""
homepage = "http://github.com/lh3/bwa"
url = "https://github.com/lh3/bwa/releases/download/v0.7.15/bwa-0.7.15.tar.bz2"
version('0.7.17', '82cba7ef695538e6a38b9d4156837381')
version('0.7.16a', 'c5115c9a5ea0406848500e4b23a7708c')
version('0.7.15', 'fcf470a46a1dbe2f96a1c5b87c530554')
version('0.7.13', 'f094f609438511766c434178a3635ab4')
version('0.7.12', 'e24a587baaad411d5da89516ad7a261a',
url='https://github.com/lh3/bwa/archive/0.7.12.tar.gz')
depends_on('zlib')
def install(self, spec, prefix):
filter_file(r'^INCLUDES=',
"INCLUDES=-I%s" % spec['zlib'].prefix.include, 'Makefile')
filter_file(r'^LIBS=', "LIBS=-L%s " % spec['zlib'].prefix.lib,
'Makefile')
make()
mkdirp(prefix.bin)
install('bwa', join_path(prefix.bin, 'bwa'))
set_executable(join_path(prefix.bin, 'bwa'))
mkdirp(prefix.doc)
install('README.md', prefix.doc)
install('NEWS.md', prefix.doc)
mkdirp(prefix.man.man1)
install('bwa.1', prefix.man.man1)
| lgpl-2.1 | 403,209,278,664,683,600 | 5,702,131,020,669,694,000 | 42.122807 | 88 | 0.644833 | false |
grupoprog3/proyecto_final | Entrega Final/flask/Lib/site-packages/click/_termui_impl.py | 64 | 16377 | """
click._termui_impl
~~~~~~~~~~~~~~~~~~
This module contains implementations for the termui module. To keep the
import time of Click down, some infrequently used functionality is placed
in this module and only imported as needed.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import time
import math
from ._compat import _default_text_stdout, range_type, PY2, isatty, \
open_stream, strip_ansi, term_len, get_best_encoding, WIN
from .utils import echo
from .exceptions import ClickException
if os.name == 'nt':
BEFORE_BAR = '\r'
AFTER_BAR = '\n'
else:
BEFORE_BAR = '\r\033[?25l'
AFTER_BAR = '\033[?25h\n'
def _length_hint(obj):
"""Returns the length hint of an object."""
try:
return len(obj)
except TypeError:
try:
get_hint = type(obj).__length_hint__
except AttributeError:
return None
try:
hint = get_hint(obj)
except TypeError:
return None
if hint is NotImplemented or \
not isinstance(hint, (int, long)) or \
hint < 0:
return None
return hint
class ProgressBar(object):
def __init__(self, iterable, length=None, fill_char='#', empty_char=' ',
bar_template='%(bar)s', info_sep=' ', show_eta=True,
show_percent=None, show_pos=False, item_show_func=None,
label=None, file=None, color=None, width=30):
self.fill_char = fill_char
self.empty_char = empty_char
self.bar_template = bar_template
self.info_sep = info_sep
self.show_eta = show_eta
self.show_percent = show_percent
self.show_pos = show_pos
self.item_show_func = item_show_func
self.label = label or ''
if file is None:
file = _default_text_stdout()
self.file = file
self.color = color
self.width = width
self.autowidth = width == 0
if length is None:
length = _length_hint(iterable)
if iterable is None:
if length is None:
raise TypeError('iterable or length is required')
iterable = range_type(length)
self.iter = iter(iterable)
self.length = length
self.length_known = length is not None
self.pos = 0
self.avg = []
self.start = self.last_eta = time.time()
self.eta_known = False
self.finished = False
self.max_width = None
self.entered = False
self.current_item = None
self.is_hidden = not isatty(self.file)
self._last_line = None
def __enter__(self):
self.entered = True
self.render_progress()
return self
def __exit__(self, exc_type, exc_value, tb):
self.render_finish()
def __iter__(self):
if not self.entered:
raise RuntimeError('You need to use progress bars in a with block.')
self.render_progress()
return self
def render_finish(self):
if self.is_hidden:
return
self.file.write(AFTER_BAR)
self.file.flush()
@property
def pct(self):
if self.finished:
return 1.0
return min(self.pos / (float(self.length) or 1), 1.0)
@property
def time_per_iteration(self):
if not self.avg:
return 0.0
return sum(self.avg) / float(len(self.avg))
@property
def eta(self):
if self.length_known and not self.finished:
return self.time_per_iteration * (self.length - self.pos)
return 0.0
def format_eta(self):
if self.eta_known:
t = self.eta + 1
seconds = t % 60
t /= 60
minutes = t % 60
t /= 60
hours = t % 24
t /= 24
if t > 0:
days = t
return '%dd %02d:%02d:%02d' % (days, hours, minutes, seconds)
else:
return '%02d:%02d:%02d' % (hours, minutes, seconds)
return ''
def format_pos(self):
pos = str(self.pos)
if self.length_known:
pos += '/%s' % self.length
return pos
def format_pct(self):
return ('% 4d%%' % int(self.pct * 100))[1:]
def format_progress_line(self):
show_percent = self.show_percent
info_bits = []
if self.length_known:
bar_length = int(self.pct * self.width)
bar = self.fill_char * bar_length
bar += self.empty_char * (self.width - bar_length)
if show_percent is None:
show_percent = not self.show_pos
else:
if self.finished:
bar = self.fill_char * self.width
else:
bar = list(self.empty_char * (self.width or 1))
if self.time_per_iteration != 0:
bar[int((math.cos(self.pos * self.time_per_iteration)
/ 2.0 + 0.5) * self.width)] = self.fill_char
bar = ''.join(bar)
if self.show_pos:
info_bits.append(self.format_pos())
if show_percent:
info_bits.append(self.format_pct())
if self.show_eta and self.eta_known and not self.finished:
info_bits.append(self.format_eta())
if self.item_show_func is not None:
item_info = self.item_show_func(self.current_item)
if item_info is not None:
info_bits.append(item_info)
return (self.bar_template % {
'label': self.label,
'bar': bar,
'info': self.info_sep.join(info_bits)
}).rstrip()
def render_progress(self):
from .termui import get_terminal_size
nl = False
if self.is_hidden:
buf = [self.label]
nl = True
else:
buf = []
# Update width in case the terminal has been resized
if self.autowidth:
old_width = self.width
self.width = 0
clutter_length = term_len(self.format_progress_line())
new_width = max(0, get_terminal_size()[0] - clutter_length)
if new_width < old_width:
buf.append(BEFORE_BAR)
buf.append(' ' * self.max_width)
self.max_width = new_width
self.width = new_width
clear_width = self.width
if self.max_width is not None:
clear_width = self.max_width
buf.append(BEFORE_BAR)
line = self.format_progress_line()
line_len = term_len(line)
if self.max_width is None or self.max_width < line_len:
self.max_width = line_len
buf.append(line)
buf.append(' ' * (clear_width - line_len))
line = ''.join(buf)
# Render the line only if it changed.
if line != self._last_line:
self._last_line = line
echo(line, file=self.file, color=self.color, nl=nl)
self.file.flush()
def make_step(self, n_steps):
self.pos += n_steps
if self.length_known and self.pos >= self.length:
self.finished = True
if (time.time() - self.last_eta) < 1.0:
return
self.last_eta = time.time()
self.avg = self.avg[-6:] + [-(self.start - time.time()) / (self.pos)]
self.eta_known = self.length_known
def update(self, n_steps):
self.make_step(n_steps)
self.render_progress()
def finish(self):
self.eta_known = 0
self.current_item = None
self.finished = True
def next(self):
if self.is_hidden:
return next(self.iter)
try:
rv = next(self.iter)
self.current_item = rv
except StopIteration:
self.finish()
self.render_progress()
raise StopIteration()
else:
self.update(1)
return rv
if not PY2:
__next__ = next
del next
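# --- Illustrative usage sketch (not part of the original module) ---
# ProgressBar is normally constructed for callers by click.progressbar();
# driven directly it must be used as a context manager, roughly like this
# (``do_work`` is a hypothetical per-item function):
#
#     with ProgressBar(range(100), length=100, label='processing',
#                      bar_template='%(label)s  [%(bar)s]  %(info)s') as bar:
#         for item in bar:
#             do_work(item)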
def pager(text, color=None):
"""Decide what method to use for paging through text."""
stdout = _default_text_stdout()
if not isatty(sys.stdin) or not isatty(stdout):
return _nullpager(stdout, text, color)
pager_cmd = (os.environ.get('PAGER', None) or '').strip()
if pager_cmd:
if WIN:
return _tempfilepager(text, pager_cmd, color)
return _pipepager(text, pager_cmd, color)
if os.environ.get('TERM') in ('dumb', 'emacs'):
return _nullpager(stdout, text, color)
if WIN or sys.platform.startswith('os2'):
return _tempfilepager(text, 'more <', color)
if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
return _pipepager(text, 'less', color)
import tempfile
fd, filename = tempfile.mkstemp()
os.close(fd)
try:
if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0:
return _pipepager(text, 'more', color)
return _nullpager(stdout, text, color)
finally:
os.unlink(filename)
def _pipepager(text, cmd, color):
"""Page through text by feeding it to another program. Invoking a
pager through this might support colors.
"""
import subprocess
env = dict(os.environ)
# If we're piping to less we might support colors under the
# condition that
cmd_detail = cmd.rsplit('/', 1)[-1].split()
if color is None and cmd_detail[0] == 'less':
less_flags = os.environ.get('LESS', '') + ' '.join(cmd_detail[1:])
if not less_flags:
env['LESS'] = '-R'
color = True
elif 'r' in less_flags or 'R' in less_flags:
color = True
if not color:
text = strip_ansi(text)
c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
env=env)
encoding = get_best_encoding(c.stdin)
try:
c.stdin.write(text.encode(encoding, 'replace'))
c.stdin.close()
except (IOError, KeyboardInterrupt):
pass
# Less doesn't respect ^C, but catches it for its own UI purposes (aborting
# search or other commands inside less).
#
# That means when the user hits ^C, the parent process (click) terminates,
# but less is still alive, paging the output and messing up the terminal.
#
# If the user wants to make the pager exit on ^C, they should set
# `LESS='-K'`. It's not our decision to make.
while True:
try:
c.wait()
except KeyboardInterrupt:
pass
else:
break
def _tempfilepager(text, cmd, color):
"""Page through text by invoking a program on a temporary file."""
import tempfile
filename = tempfile.mktemp()
if not color:
text = strip_ansi(text)
encoding = get_best_encoding(sys.stdout)
with open_stream(filename, 'wb')[0] as f:
f.write(text.encode(encoding))
try:
os.system(cmd + ' "' + filename + '"')
finally:
os.unlink(filename)
def _nullpager(stream, text, color):
"""Simply print unformatted text. This is the ultimate fallback."""
if not color:
text = strip_ansi(text)
stream.write(text)
class Editor(object):
def __init__(self, editor=None, env=None, require_save=True,
extension='.txt'):
self.editor = editor
self.env = env
self.require_save = require_save
self.extension = extension
def get_editor(self):
if self.editor is not None:
return self.editor
for key in 'VISUAL', 'EDITOR':
rv = os.environ.get(key)
if rv:
return rv
if WIN:
return 'notepad'
for editor in 'vim', 'nano':
if os.system('which %s >/dev/null 2>&1' % editor) == 0:
return editor
return 'vi'
def edit_file(self, filename):
import subprocess
editor = self.get_editor()
if self.env:
environ = os.environ.copy()
environ.update(self.env)
else:
environ = None
try:
c = subprocess.Popen('%s "%s"' % (editor, filename),
env=environ, shell=True)
exit_code = c.wait()
if exit_code != 0:
raise ClickException('%s: Editing failed!' % editor)
except OSError as e:
raise ClickException('%s: Editing failed: %s' % (editor, e))
def edit(self, text):
import tempfile
text = text or ''
if text and not text.endswith('\n'):
text += '\n'
fd, name = tempfile.mkstemp(prefix='editor-', suffix=self.extension)
try:
if WIN:
encoding = 'utf-8-sig'
text = text.replace('\n', '\r\n')
else:
encoding = 'utf-8'
text = text.encode(encoding)
f = os.fdopen(fd, 'wb')
f.write(text)
f.close()
timestamp = os.path.getmtime(name)
self.edit_file(name)
if self.require_save \
and os.path.getmtime(name) == timestamp:
return None
f = open(name, 'rb')
try:
rv = f.read()
finally:
f.close()
return rv.decode('utf-8-sig').replace('\r\n', '\n')
finally:
os.unlink(name)
def open_url(url, wait=False, locate=False):
import subprocess
def _unquote_file(url):
        import urllib
if url.startswith('file://'):
url = urllib.unquote(url[7:])
return url
if sys.platform == 'darwin':
args = ['open']
if wait:
args.append('-W')
if locate:
args.append('-R')
args.append(_unquote_file(url))
null = open('/dev/null', 'w')
try:
return subprocess.Popen(args, stderr=null).wait()
finally:
null.close()
elif WIN:
if locate:
url = _unquote_file(url)
args = 'explorer /select,"%s"' % _unquote_file(
url.replace('"', ''))
else:
args = 'start %s "" "%s"' % (
wait and '/WAIT' or '', url.replace('"', ''))
return os.system(args)
try:
if locate:
url = os.path.dirname(_unquote_file(url)) or '.'
else:
url = _unquote_file(url)
c = subprocess.Popen(['xdg-open', url])
if wait:
return c.wait()
return 0
except OSError:
if url.startswith(('http://', 'https://')) and not locate and not wait:
import webbrowser
webbrowser.open(url)
return 0
return 1
def _translate_ch_to_exc(ch):
if ch == '\x03':
raise KeyboardInterrupt()
if ch == '\x04':
raise EOFError()
if WIN:
import msvcrt
def getchar(echo):
rv = msvcrt.getch()
if echo:
msvcrt.putchar(rv)
_translate_ch_to_exc(rv)
if PY2:
enc = getattr(sys.stdin, 'encoding', None)
if enc is not None:
rv = rv.decode(enc, 'replace')
else:
rv = rv.decode('cp1252', 'replace')
return rv
else:
import tty
import termios
def getchar(echo):
if not isatty(sys.stdin):
f = open('/dev/tty')
fd = f.fileno()
else:
fd = sys.stdin.fileno()
f = None
try:
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(fd)
ch = os.read(fd, 32)
if echo and isatty(sys.stdout):
sys.stdout.write(ch)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
sys.stdout.flush()
if f is not None:
f.close()
except termios.error:
pass
_translate_ch_to_exc(ch)
return ch.decode(get_best_encoding(sys.stdin), 'replace')
| apache-2.0 | 5,682,536,871,789,093,000 | -2,712,610,608,182,596,000 | 28.939671 | 80 | 0.522806 | false |
crawfordsm/pysalt | lib/saltstat.py | 2 | 7174 | ################################# LICENSE ##################################
# Copyright (c) 2009, South African Astronomical Observatory (SAAO) #
# All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# * Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer #
# in the documentation and/or other materials provided with the #
# distribution. #
# * Neither the name of the South African Astronomical Observatory #
# (SAAO) nor the names of its contributors may be used to endorse #
# or promote products derived from this software without specific #
# prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE SAAO ''AS IS'' AND ANY EXPRESS OR #
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED #
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE #
# DISCLAIMED. IN NO EVENT SHALL THE SAAO BE LIABLE FOR ANY #
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL #
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS #
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN #
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
############################################################################
"""saltstat contains statistical functions"""
import numpy as np
from salterror import SaltError
def mean(list):
"""calculate mean of numeric list"""
total = 0
for item in list:
total += item
mean = total / len(list)
return mean
def median(x,logfile=None):
"""calculate median of numeric list
    logfile -- deprecated variable
"""
try:
return np.median(x)
except Exception, e:
        message = 'Cannot calculate median because %s' % e
raise SaltError(message)
def mad(x):
"""Calculated the Median Absolute Deviation defined as:
MAD=median(|x - median(x)|)
"""
return np.median(abs(x-np.median(x)))
def median2d(arrays,logfile=None):
"""calculate median of 2d array
logfile--depreciated variable
"""
try:
arrays = arrays.ravel()
median = np.median(arrays)
except Exception, e:
median=None
message = 'ERROR -- SALTSTAT.MEDIAN2D: Cannot median image arrays because %s' % e
raise SaltError(message)
return median
def mean2d(arrays):
"""calculate mean of 2d array"""
mean = arrays[0]
for image in arrays[1:]:
mean += image
mean /= len(arrays)
return mean
def std2dclip(arrays, mean, std, sig):
"""calculate clipped std of 2d array"""
if np.size(arrays)==0: return 0
mask=(abs(arrays-mean) < sig*std)
nsize=np.sum(mask)
if nsize > 0:
stddev=arrays[mask].std()
else:
return 0
return stddev
def mean2dclip(arrays, mean, std, sig):
"""calculate the sigma clipped mean of 2d array"""
if np.size(arrays)==0: return 0
mask=(abs(arrays-mean) < sig*std)
if np.sum(mask) > 0:
mean=arrays[mask].mean()
else:
return 0
return mean
def median2dclip(arr, mean, std, sig):
"""calculate the sigma clipped median of 2d array"""
if np.size(arr)==0: return 0
try:
arr = arr.ravel()
mask=(abs(arr-mean) < sig*std)
median = np.median(arr[mask])
except Exception, e:
median=-1
return median
def iterstat(arr, sig, niter, verbose=False):
"""iterstas calculates an arrays statistics using
a sigma clipped values
"""
mean=arr.mean()
std=arr.std()
median=np.median(arr)
if verbose: print mean, median, std
for i in range(niter):
mask=(abs(arr-mean)<sig*std)
mean=arr[mask].mean()
std=arr[mask].std()
median=np.median(arr[mask])
if verbose: print i,mask.sum(), mean, median, std
return mean, median, std
def median_combine(arrays, logfile=None, axis=0):
"""Median combine a set of arrays
logfile--depreciated variable
"""
status = 0
try:
median = np.median(arrays, axis=axis)
except Exception, e:
median=None
message = 'ERROR -- SALTSTAT.MEDIAN_COMBINE: Cannot median combine arrays because %s' % e
raise SaltError(message)
return median, status
def median_image(arr, nbin):
"""Median smooth an image with a filter size set by bin
returns arr
"""
from scipy.ndimage.filters import median_filter
try:
arr=median_filter(arr,size=(nbin,nbin))
except Exception, e:
raise SaltError('Could not median filter image because %s' % e)
return arr
def median_absolute_deviation(a, axis=None):
"""Compute the median absolute deviation
Returns the median absolute deviation of the array elements. The MAD is
defined as median(|a-median(a)|).
Parameters
----------
a : array_like
Input array or object that can be converted to an array.
axis : int, optional
Axis along which the medians are computed. The default (axis=None)
is to compute the median along a flattened version of the array.
Returns
-------
median_absolute_deviation : ndarray
A new array holding the result. If the input contains
integers, or floats of smaller precision than 64, then the output
Examples
--------
This will generate random variates from a Gaussian distribution and return
the median absolute deviation for that distribution::
    >>> from astropy.stats import median_absolute_deviation
>>> from numpy.random import randn
>>> randvar = randn(10000)
>>> mad = median_absolute_deviation(randvar)
See Also
--------
median
"""
a = np.array(a, copy=False)
a_median = np.median(a, axis=axis)
#re-broadcast the output median array to subtract it
if axis is not None:
shape = list(a_median.shape)
shape.append(1)
a_median = a_median.reshape(shape)
    # calculate the median absolute deviation
return np.median(np.abs(a - a_median), axis=axis)
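# --- Illustrative usage sketch (not part of the original module) ---
# A hedged demonstration of the sigma-clipping helpers above on synthetic
# data; the distribution parameters and outlier values are arbitrary.
if __name__ == '__main__':
    _data = np.random.normal(10.0, 2.0, 1000)
    _data[:5] = 1.0e4  # inject a few strong outliers
    _mean, _median, _std = iterstat(_data, sig=3.0, niter=5)
    print 'clipped mean/median/std:', _mean, _median, _std
    print 'MAD:', median_absolute_deviation(_data)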
| bsd-3-clause | 2,288,959,028,441,923,000 | 5,721,373,139,117,397,000 | 31.609091 | 97 | 0.586981 | false |
cybem/graphite-web-iow | webapp/graphite/logger.py | 23 | 3767 | """Copyright 2008 Orbitz WorldWide
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
import os, logging
from logging.handlers import TimedRotatingFileHandler as Rotater
try:
from logging import NullHandler
except ImportError as ie: # py2.6
from logging import Handler
class NullHandler(Handler):
def emit(self, record):
pass
try:
from logging import FileHandler
except ImportError as ie: # py2.6
from logging.handlers import FileHandler
from django.conf import settings
logging.addLevelName(30,"rendering")
logging.addLevelName(30,"cache")
logging.addLevelName(30,"metric_access")
class GraphiteLogger:
def __init__(self):
self.infoLogger = self._config_logger('info.log',
'info',
True,
level = logging.INFO,
)
self.exceptionLogger = self._config_logger('exception.log',
'exception',
True,
)
self.cacheLogger = self._config_logger('cache.log',
'cache',
settings.LOG_CACHE_PERFORMANCE,
)
self.renderingLogger = self._config_logger('rendering.log',
'rendering',
settings.LOG_RENDERING_PERFORMANCE,
)
self.metricAccessLogger = self._config_logger('metricaccess.log',
'metric_access',
settings.LOG_METRIC_ACCESS,
)
@staticmethod
def _config_logger(log_file_name, name, activate,
level=None, when='midnight', backupCount=1):
log_file = os.path.join(settings.LOG_DIR, log_file_name)
logger = logging.getLogger(name)
if level is not None:
logger.setLevel(level)
if activate: # if want to log this one
formatter = logging.Formatter("%(asctime)s :: %(message)s","%a %b %d %H:%M:%S %Y")
if settings.LOG_ROTATE: # if we want to rotate logs
handler = Rotater(log_file, when=when, backupCount=backupCount)
else: # let someone else, e.g. logrotate, rotate the logs
handler = FileHandler(log_file)
handler.setFormatter(formatter)
logger.addHandler(handler)
else:
logger.addHandler(NullHandler())
return logger
def info(self,msg,*args,**kwargs):
return self.infoLogger.info(msg,*args,**kwargs)
def exception(self,msg="Exception Caught",**kwargs):
return self.exceptionLogger.exception(msg,**kwargs)
def cache(self,msg,*args,**kwargs):
return self.cacheLogger.log(30,msg,*args,**kwargs)
def rendering(self,msg,*args,**kwargs):
return self.renderingLogger.log(30,msg,*args,**kwargs)
def metric_access(self,msg,*args,**kwargs):
return self.metricAccessLogger.log(30,msg,*args,**kwargs)
log = GraphiteLogger() # import-shared logger instance
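# --- Illustrative usage sketch (not part of the original module) ---
# Other webapp modules are expected to use the shared instance roughly as
# below; ``target`` and ``duration`` are hypothetical variables, and the
# LOG_* settings decide which of these calls actually write to disk:
#
#     from graphite.logger import log
#     log.info("rendering request for %s" % target)
#     log.rendering("graph rendered in %.3f seconds" % duration)
#     try:
#         raise ValueError("boom")
#     except ValueError:
#         log.exception("caught an error while rendering")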
| apache-2.0 | 741,367,386,475,380,500 | -4,625,407,959,349,146,000 | 38.652632 | 90 | 0.573666 | false |
partizand/bankparser | setup.py | 1 | 3023 | #!/usr/bin/python3
"""Setup
"""
#import distutils.cmd
import re
from setuptools import setup, find_packages
# import build
# import src.bankparser
# class GenFiles(distutils.cmd.Command):
# """Генерация некоторых файлов проекта и справки
# """
# user_options = []
# description = 'generate .py and readme command'
#
# def initialize_options(self):
# pass
#
# def finalize_options(self):
# pass
#
# def run(self):
# mybuild = build.MyBuild()
# mybuild.buid()
#
#
# class CopyScript(distutils.cmd.Command):
# """
# Для отладочных целей. Копирует пакет без установки в указанный каталог
# """
# user_options = [('pubdir=', None, 'Specify dir for public')]
# description = 'copy script for testing'
#
# def initialize_options(self):
# self.pubdir = None
#
# def finalize_options(self):
# pass
#
# def run(self):
# mybuild = build.MyBuild(self.pubdir)
# mybuild.copy_script()
# find version in init file
def find_version(filename):
with open(filename, 'r') as f:
version_file = f.read()
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
version = find_version("src/bankparser/__init__.py")
with open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(name='bankparser',
version=version,
author="partizand",
author_email="",
url="https://github.com/partizand/bankparser",
description="Convert banks statements to qif format",
long_description=long_description,
license="GPLv3",
keywords=["qif", "banking", "statement"],
#cmdclass={'copyscript': CopyScript, 'genfiles': GenFiles},
classifiers=[
'Development Status :: 3 - Alpha',
'Programming Language :: Python :: 3',
'Natural Language :: Russian',
'Topic :: Office/Business :: Financial :: Accounting',
'Topic :: Utilities',
'Environment :: Console',
'Operating System :: OS Independent',
'License :: OSI Approved :: GNU General Public License v3'],
#packages=find_packages('src'),
#packages=['bankparser', 'bankparser.banks', 'bankparser.test'],
packages=['bankparser', 'bankparser.banks'],
package_dir={'': 'src'},
#package_data={'bankparser': ['*.ini']},
test_suite='bankparser.test',
install_requires=['setuptools'],
# 'appdirs'
# ],
# namespace_packages=["bankparser"],
entry_points={
'console_scripts':
['bankparser = bankparser.bankparsercli:main'],
},
#include_package_data=True,
#zip_safe=False
)
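# --- Illustrative usage notes (not part of the original file) ---
# Typical commands run against this setup script; exact invocations may
# vary with the local environment:
#
#     python3 setup.py test      # runs the bankparser.test suite
#     python3 setup.py sdist     # builds a source distribution
#     pip install .              # installs the "bankparser" console script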
| gpl-3.0 | -2,145,150,048,470,826,200 | -1,941,911,829,573,331,200 | 26.847619 | 76 | 0.578659 | false |
tima/ansible | lib/ansible/modules/messaging/rabbitmq_plugin.py | 85 | 4566 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Chatham Financial <oss@chathamfinancial.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rabbitmq_plugin
short_description: Manage RabbitMQ plugins
description:
- Manage RabbitMQ plugins.
version_added: "1.1"
author:
- Chris Hoffman (@chrishoffman)
options:
names:
description:
- Comma-separated list of plugin names.
required: true
aliases: [name]
new_only:
description:
- Only enable missing plugins.
- Does not disable plugins that are not in the names list.
type: bool
default: "no"
state:
description:
- Specify if plugins are to be enabled or disabled.
default: enabled
choices: [enabled, disabled]
prefix:
description:
- Specify a custom install prefix to a Rabbit.
version_added: "1.3"
'''
EXAMPLES = '''
- name: Enables the rabbitmq_management plugin
rabbitmq_plugin:
names: rabbitmq_management
state: enabled
'''
RETURN = '''
enabled:
description: list of plugins enabled during task run
returned: always
type: list
sample: ["rabbitmq_management"]
disabled:
description: list of plugins disabled during task run
returned: always
type: list
sample: ["rabbitmq_management"]
'''
import os
from ansible.module_utils.basic import AnsibleModule
class RabbitMqPlugins(object):
def __init__(self, module):
self.module = module
if module.params['prefix']:
if os.path.isdir(os.path.join(module.params['prefix'], 'bin')):
bin_path = os.path.join(module.params['prefix'], 'bin')
elif os.path.isdir(os.path.join(module.params['prefix'], 'sbin')):
bin_path = os.path.join(module.params['prefix'], 'sbin')
else:
# No such path exists.
raise Exception("No binary folder in prefix %s" %
module.params['prefix'])
self._rabbitmq_plugins = bin_path + "/rabbitmq-plugins"
else:
self._rabbitmq_plugins = module.get_bin_path('rabbitmq-plugins', True)
def _exec(self, args, run_in_check_mode=False):
if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
cmd = [self._rabbitmq_plugins]
rc, out, err = self.module.run_command(cmd + args, check_rc=True)
return out.splitlines()
return list()
def get_all(self):
list_output = self._exec(['list', '-E', '-m'], True)
plugins = []
for plugin in list_output:
if not plugin:
break
plugins.append(plugin)
return plugins
def enable(self, name):
self._exec(['enable', name])
def disable(self, name):
self._exec(['disable', name])
def main():
arg_spec = dict(
names=dict(required=True, aliases=['name']),
new_only=dict(default='no', type='bool'),
state=dict(default='enabled', choices=['enabled', 'disabled']),
prefix=dict(required=False, default=None)
)
module = AnsibleModule(
argument_spec=arg_spec,
supports_check_mode=True
)
result = dict()
names = module.params['names'].split(',')
new_only = module.params['new_only']
state = module.params['state']
rabbitmq_plugins = RabbitMqPlugins(module)
enabled_plugins = rabbitmq_plugins.get_all()
enabled = []
disabled = []
if state == 'enabled':
if not new_only:
for plugin in enabled_plugins:
if plugin not in names:
rabbitmq_plugins.disable(plugin)
disabled.append(plugin)
for name in names:
if name not in enabled_plugins:
rabbitmq_plugins.enable(name)
enabled.append(name)
else:
for plugin in enabled_plugins:
if plugin in names:
rabbitmq_plugins.disable(plugin)
disabled.append(plugin)
result['changed'] = len(enabled) > 0 or len(disabled) > 0
result['enabled'] = enabled
result['disabled'] = disabled
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -7,400,426,556,925,978,000 | 7,387,118,556,517,486,000 | 26.672727 | 92 | 0.599869 | false |
GeotrekCE/Geotrek-admin | geotrek/api/v2/functions.py | 2 | 1450 | from django.db.models import Func
from django.db.models.fields import FloatField, CharField
from django.contrib.gis.db.models import GeometryField, PointField
def Transform(geom, srid):
"""
ST_Transform postgis function
"""
return Func(geom, srid, function='ST_Transform')
def Buffer(geom, radius, num_seg):
"""
ST_Buffer postgis function
"""
return Func(geom, radius, num_seg, function='ST_Buffer', output_field=GeometryField())
def GeometryType(geom):
"""
GeometryType postgis function
"""
return Func(geom, function='GeometryType', output_field=CharField())
def LineLocatePoint(line, geom):
"""
ST_LineLocatePoint postgis function
"""
return Func(line, geom, function='ST_LINELOCATEPOINT', output_field=FloatField())
class Length(Func):
"""
ST_Length postgis function
"""
function = 'ST_Length'
output_field = FloatField()
class Length3D(Func):
"""
ST_3DLENGTH postgis function
"""
function = 'ST_3DLENGTH'
output_field = FloatField()
class Area(Func):
"""
ST_Area postgis function
"""
function = 'ST_Area'
output_field = FloatField()
class StartPoint(Func):
"""
ST_StartPoint postgis function
"""
function = 'ST_StartPoint'
output_field = PointField()
class EndPoint(Func):
"""
ST_EndPoint postgis function
"""
function = 'ST_EndPoint'
output_field = PointField()
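# --- Illustrative usage sketch (not part of the original module) ---
# These helpers are intended for queryset annotations; ``Trek`` and its
# ``geom`` field below are hypothetical names used only for illustration:
#
#     from django.db.models import F
#     Trek.objects.annotate(
#         length_2d=Length(F('geom')),
#         start_wgs84=StartPoint(Transform(F('geom'), 4326)),
#     )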
| bsd-2-clause | -2,468,473,180,279,639,600 | 6,552,344,953,825,513,000 | 19.422535 | 90 | 0.652414 | false |
ahmadiga/min_edx | openedx/core/djangoapps/content/course_overviews/management/commands/generate_course_overview.py | 29 | 2115 | """
Command to load course overviews.
"""
import logging
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import modulestore
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
log = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Example usage:
$ ./manage.py lms generate_course_overview --all --settings=devstack
$ ./manage.py lms generate_course_overview 'edX/DemoX/Demo_Course' --settings=devstack
"""
args = '<course_id course_id ...>'
help = 'Generates and stores course overview for one or more courses.'
option_list = BaseCommand.option_list + (
make_option('--all',
action='store_true',
default=False,
help='Generate course overview for all courses.'),
)
def handle(self, *args, **options):
course_keys = []
if options['all']:
course_keys = [course.id for course in modulestore().get_courses()]
else:
if len(args) < 1:
raise CommandError('At least one course or --all must be specified.')
try:
course_keys = [CourseKey.from_string(arg) for arg in args]
except InvalidKeyError:
log.fatal('Invalid key specified.')
if not course_keys:
log.fatal('No courses specified.')
log.info('Generating course overview for %d courses.', len(course_keys))
log.debug('Generating course overview(s) for the following courses: %s', course_keys)
for course_key in course_keys:
try:
CourseOverview.get_from_id(course_key)
except Exception as ex: # pylint: disable=broad-except
log.exception('An error occurred while generating course overview for %s: %s', unicode(
course_key), ex.message)
log.info('Finished generating course overviews.')
| agpl-3.0 | 5,991,222,673,754,292,000 | -8,320,152,297,349,881,000 | 34.25 | 103 | 0.626478 | false |
CloverHealth/airflow | airflow/contrib/kubernetes/kubernetes_request_factory/pod_request_factory.py | 1 | 3983 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import yaml
from airflow.contrib.kubernetes.kubernetes_request_factory.kubernetes_request_factory \
import KubernetesRequestFactory
class SimplePodRequestFactory(KubernetesRequestFactory):
"""
Request generator for a simple pod.
"""
_yaml = """apiVersion: v1
kind: Pod
metadata:
name: name
spec:
containers:
- name: base
image: airflow-worker:latest
command: ["/usr/local/airflow/entrypoint.sh", "/bin/bash sleep 25"]
restartPolicy: Never
"""
def __init__(self):
pass
def create(self, pod):
# type: (Pod) -> dict
req = yaml.load(self._yaml)
self.extract_name(pod, req)
self.extract_labels(pod, req)
self.extract_image(pod, req)
self.extract_image_pull_policy(pod, req)
self.extract_cmds(pod, req)
self.extract_args(pod, req)
self.extract_node_selector(pod, req)
self.extract_env_and_secrets(pod, req)
self.extract_volume_secrets(pod, req)
self.attach_volumes(pod, req)
self.attach_volume_mounts(pod, req)
self.extract_resources(pod, req)
self.extract_service_account_name(pod, req)
self.extract_init_containers(pod, req)
self.extract_image_pull_secrets(pod, req)
self.extract_annotations(pod, req)
self.extract_affinity(pod, req)
self.extract_tolerations(pod, req)
return req
class ExtractXcomPodRequestFactory(KubernetesRequestFactory):
XCOM_MOUNT_PATH = '/airflow/xcom'
SIDECAR_CONTAINER_NAME = 'airflow-xcom-sidecar'
"""
Request generator for a pod with sidecar container.
"""
_yaml = """apiVersion: v1
kind: Pod
metadata:
name: name
spec:
volumes:
- name: xcom
emptyDir: {{}}
containers:
- name: base
image: airflow-worker:latest
command: ["/usr/local/airflow/entrypoint.sh", "/bin/bash sleep 25"]
volumeMounts:
- name: xcom
mountPath: {xcomMountPath}
- name: {sidecarContainerName}
image: python:3.5-alpine
command: ["python", "-m", "http.server"]
volumeMounts:
- name: xcom
mountPath: {xcomMountPath}
restartPolicy: Never
""".format(xcomMountPath=XCOM_MOUNT_PATH, sidecarContainerName=SIDECAR_CONTAINER_NAME)
def __init__(self):
pass
def create(self, pod):
# type: (Pod) -> dict
req = yaml.load(self._yaml)
self.extract_name(pod, req)
self.extract_labels(pod, req)
self.extract_image(pod, req)
self.extract_image_pull_policy(pod, req)
self.extract_cmds(pod, req)
self.extract_args(pod, req)
self.extract_node_selector(pod, req)
self.extract_env_and_secrets(pod, req)
self.extract_volume_secrets(pod, req)
self.attach_volumes(pod, req)
self.attach_volume_mounts(pod, req)
self.extract_resources(pod, req)
self.extract_service_account_name(pod, req)
self.extract_init_containers(pod, req)
self.extract_image_pull_secrets(pod, req)
self.extract_annotations(pod, req)
self.extract_affinity(pod, req)
self.extract_tolerations(pod, req)
return req
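# --- Illustrative usage sketch (not part of the original module) ---
# The Kubernetes executor builds a request dict roughly as below; the Pod
# import path and constructor arguments are assumptions for illustration:
#
#     from airflow.contrib.kubernetes.pod import Pod
#     pod = Pod(image='airflow-worker:latest',
#               envs={}, cmds=['bash', '-cx'], args=['echo hello'],
#               name='demo-pod', namespace='default')
#     req = SimplePodRequestFactory().create(pod)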
| apache-2.0 | 5,026,196,407,239,332,000 | 9,085,796,109,907,925,000 | 31.917355 | 90 | 0.659302 | false |
1st/django-social-auth | social_auth/backends/contrib/douban.py | 3 | 2462 | """
Douban OAuth support.
This adds support for Douban OAuth service. An application must
be registered first on douban.com and the settings DOUBAN_CONSUMER_KEY
and DOUBAN_CONSUMER_SECRET must be defined with they corresponding
values.
By default account id is stored in extra_data field, check OAuthBackend
class for details on how to extend it.
"""
from django.utils import simplejson
from social_auth.backends import ConsumerBasedOAuth, OAuthBackend, USERNAME
from social_auth.backends.exceptions import AuthCanceled
DOUBAN_SERVER = 'www.douban.com'
DOUBAN_REQUEST_TOKEN_URL = 'http://%s/service/auth/request_token' % \
DOUBAN_SERVER
DOUBAN_ACCESS_TOKEN_URL = 'http://%s/service/auth/access_token' % \
DOUBAN_SERVER
DOUBAN_AUTHORIZATION_URL = 'http://%s/service/auth/authorize' % \
DOUBAN_SERVER
class DoubanBackend(OAuthBackend):
"""Douban OAuth authentication backend"""
name = 'douban'
EXTRA_DATA = [('id', 'id')]
def get_user_id(self, details, response):
return response['db:uid']['$t']
def get_user_details(self, response):
"""Return user details from Douban"""
return {USERNAME: response["db:uid"]["$t"],
'email': ''}
class DoubanAuth(ConsumerBasedOAuth):
"""Douban OAuth authentication mechanism"""
AUTHORIZATION_URL = DOUBAN_AUTHORIZATION_URL
REQUEST_TOKEN_URL = DOUBAN_REQUEST_TOKEN_URL
ACCESS_TOKEN_URL = DOUBAN_ACCESS_TOKEN_URL
SERVER_URL = DOUBAN_SERVER
AUTH_BACKEND = DoubanBackend
SETTINGS_KEY_NAME = 'DOUBAN_CONSUMER_KEY'
SETTINGS_SECRET_NAME = 'DOUBAN_CONSUMER_SECRET'
def user_data(self, access_token, *args, **kwargs):
"""Return user data provided"""
url = 'http://api.douban.com/people/%40me?&alt=json'
request = self.oauth_request(access_token, url)
json = self.fetch_response(request)
try:
return simplejson.loads(json)
except ValueError:
return None
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance"""
if 'denied' in self.data:
raise AuthCanceled(self)
else:
return super(DoubanAuth, self).auth_complete(*args, **kwargs)
# Backend definition
BACKENDS = {
'douban': DoubanAuth,
}
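# --- Illustrative configuration sketch (not part of the original module) ---
# Typical settings.py entries when enabling this backend; the key and
# secret values below are placeholders:
#
#     AUTHENTICATION_BACKENDS = (
#         'social_auth.backends.contrib.douban.DoubanBackend',
#         'django.contrib.auth.backends.ModelBackend',
#     )
#     DOUBAN_CONSUMER_KEY = 'your-consumer-key'
#     DOUBAN_CONSUMER_SECRET = 'your-consumer-secret'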
| bsd-3-clause | -6,108,917,231,719,249,000 | -5,867,254,506,878,728,000 | 31.27027 | 75 | 0.63485 | false |
sestrella/ansible | test/units/modules/network/ios/ios_module.py | 50 | 2516 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestIosModule(ModuleTestCase):
def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
self.load_fixtures(commands)
if failed:
result = self.failed()
self.assertTrue(result['failed'], result)
else:
result = self.changed(changed)
self.assertEqual(result['changed'], changed, result)
if commands is not None:
if sort:
self.assertEqual(sorted(commands), sorted(result['commands']), result['commands'])
else:
self.assertEqual(commands, result['commands'], result['commands'])
return result
def failed(self):
with self.assertRaises(AnsibleFailJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'], result)
return result
def changed(self, changed=False):
with self.assertRaises(AnsibleExitJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], changed, result)
return result
def load_fixtures(self, commands=None):
pass
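# --- Illustrative usage sketch (not part of the original module) ---
# Concrete test cases typically subclass TestIosModule roughly as below;
# the module under test, fixture name and mocked attribute are hypothetical:
#
#     class TestIosCommandModule(TestIosModule):
#         module = ios_command
#
#         def load_fixtures(self, commands=None):
#             self.run_commands.return_value = [load_fixture('show_version.txt')]
#
#         def test_show_version(self):
#             set_module_args(dict(commands=['show version']))
#             self.execute_module(changed=False)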
| gpl-3.0 | -5,591,507,615,958,404,000 | 5,960,947,036,813,993,000 | 27.590909 | 100 | 0.660175 | false |
jordanemedlock/psychtruths | temboo/core/Library/eBay/Trading/GetMemberMessages.py | 4 | 6986 | # -*- coding: utf-8 -*-
###############################################################################
#
# GetMemberMessages
# Retrieves a list of the messages that buyers have posted about your active item listings.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetMemberMessages(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetMemberMessages Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetMemberMessages, self).__init__(temboo_session, '/Library/eBay/Trading/GetMemberMessages')
def new_input_set(self):
return GetMemberMessagesInputSet()
def _make_result_set(self, result, path):
return GetMemberMessagesResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetMemberMessagesChoreographyExecution(session, exec_id, path)
class GetMemberMessagesInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetMemberMessages
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_DisplayToPublic(self, value):
"""
Set the value of the DisplayToPublic input for this Choreo. ((optional, boolean) When set to true, only public messages (viewable in the Item listing) are returned.)
"""
super(GetMemberMessagesInputSet, self)._set_input('DisplayToPublic', value)
def set_EndCreationTime(self, value):
"""
Set the value of the EndCreationTime input for this Choreo. ((optional, date) Used to filter by date range (e.g., 2013-02-08T00:00:00.000Z).)
"""
super(GetMemberMessagesInputSet, self)._set_input('EndCreationTime', value)
def set_EntriesPerPage(self, value):
"""
Set the value of the EntriesPerPage input for this Choreo. ((optional, integer) The maximum number of records to return in the result.)
"""
super(GetMemberMessagesInputSet, self)._set_input('EntriesPerPage', value)
def set_ItemID(self, value):
"""
Set the value of the ItemID input for this Choreo. ((optional, string) The ID of the item the message is about.)
"""
super(GetMemberMessagesInputSet, self)._set_input('ItemID', value)
def set_MailMessageType(self, value):
"""
Set the value of the MailMessageType input for this Choreo. ((required, string) The type of message to retrieve. Valid values are: All and AskSellerQuestion. When set to AskSellerQuestion, ItemID or a date range filter must be specified.)
"""
super(GetMemberMessagesInputSet, self)._set_input('MailMessageType', value)
def set_MemberMessageID(self, value):
"""
Set the value of the MemberMessageID input for this Choreo. ((optional, string) An ID that uniquely identifies the message for a given user to be retrieved.)
"""
super(GetMemberMessagesInputSet, self)._set_input('MemberMessageID', value)
def set_MessageStatus(self, value):
"""
Set the value of the MessageStatus input for this Choreo. ((optional, string) The status of the message. Valid values are: Answered and Unanswered.)
"""
super(GetMemberMessagesInputSet, self)._set_input('MessageStatus', value)
def set_PageNumber(self, value):
"""
Set the value of the PageNumber input for this Choreo. ((optional, integer) Specifies the page number of the results to return.)
"""
super(GetMemberMessagesInputSet, self)._set_input('PageNumber', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: json (the default) and xml.)
"""
super(GetMemberMessagesInputSet, self)._set_input('ResponseFormat', value)
def set_SandboxMode(self, value):
"""
Set the value of the SandboxMode input for this Choreo. ((conditional, boolean) Indicates that the request should be made to the sandbox endpoint instead of the production endpoint. Set to 1 to enable sandbox mode.)
"""
super(GetMemberMessagesInputSet, self)._set_input('SandboxMode', value)
def set_SenderID(self, value):
"""
Set the value of the SenderID input for this Choreo. ((optional, string) The seller's UserID.)
"""
super(GetMemberMessagesInputSet, self)._set_input('SenderID', value)
def set_SiteID(self, value):
"""
Set the value of the SiteID input for this Choreo. ((optional, string) The eBay site ID that you want to access. Defaults to 0 indicating the US site.)
"""
super(GetMemberMessagesInputSet, self)._set_input('SiteID', value)
def set_StartCreationTime(self, value):
"""
Set the value of the StartCreationTime input for this Choreo. ((optional, date) Used to filter by date range (e.g., 2013-02-08T00:00:00.000Z).)
"""
super(GetMemberMessagesInputSet, self)._set_input('StartCreationTime', value)
def set_UserToken(self, value):
"""
Set the value of the UserToken input for this Choreo. ((required, string) A valid eBay Auth Token.)
"""
super(GetMemberMessagesInputSet, self)._set_input('UserToken', value)
class GetMemberMessagesResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetMemberMessages Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from eBay.)
"""
return self._output.get('Response', None)
class GetMemberMessagesChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetMemberMessagesResultSet(response, path)
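# --- Illustrative usage sketch (not part of the original module) ---
# A hedged example of driving this Choreo through the Temboo session API;
# the credential and token values are placeholders:
#
#     from temboo.core.session import TembooSession
#     session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
#     choreo = GetMemberMessages(session)
#     inputs = choreo.new_input_set()
#     inputs.set_UserToken('EBAY_AUTH_TOKEN')
#     inputs.set_MailMessageType('All')
#     results = choreo.execute_with_results(inputs)
#     print(results.get_Response())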
| apache-2.0 | -5,943,839,269,555,848,000 | 379,912,549,720,475,100 | 46.52381 | 246 | 0.678357 | false |
openhatch/oh-mainline | vendor/packages/scrapy/scrapy/http/response/dammit.py | 16 | 11593 | """
This module contains a fork of the UnicodeDammit class from BeautifulSoup that
explicitly disables any usage of the chardet library.
The UnicodeDammit class is used as a last resort for detecting the encoding
of a response.
"""
import re
import codecs
chardet = None  # we don't want to use chardet since it's very slow
class UnicodeDammit:
"""A class for detecting the encoding of a *ML document and
converting it to a Unicode string. If the source encoding is
windows-1252, can replace MS smart quotes with their HTML or XML
equivalents."""
# This dictionary maps commonly seen values for "charset" in HTML
# meta tags to the corresponding Python codec names. It only covers
# values that aren't in Python's aliases and can't be determined
# by the heuristics in find_codec.
CHARSET_ALIASES = { "macintosh" : "mac-roman",
"x-sjis" : "shift-jis" }
def __init__(self, markup, overrideEncodings=[],
smartQuotesTo='xml', isHTML=False):
self.declaredHTMLEncoding = None
self.markup, documentEncoding, sniffedEncoding = \
self._detectEncoding(markup, isHTML)
self.smartQuotesTo = smartQuotesTo
self.triedEncodings = []
if markup == '' or isinstance(markup, unicode):
self.originalEncoding = None
self.unicode = unicode(markup)
return
u = None
for proposedEncoding in overrideEncodings:
u = self._convertFrom(proposedEncoding)
if u: break
if not u:
for proposedEncoding in (documentEncoding, sniffedEncoding):
u = self._convertFrom(proposedEncoding)
if u: break
# If no luck and we have auto-detection library, try that:
if not u and chardet and not isinstance(self.markup, unicode):
u = self._convertFrom(chardet.detect(self.markup)['encoding'])
# As a last resort, try utf-8 and windows-1252:
if not u:
for proposed_encoding in ("utf-8", "windows-1252"):
u = self._convertFrom(proposed_encoding)
if u: break
self.unicode = u
if not u: self.originalEncoding = None
def _subMSChar(self, orig):
"""Changes a MS smart quote character to an XML or HTML
entity."""
sub = self.MS_CHARS.get(orig)
if isinstance(sub, tuple):
if self.smartQuotesTo == 'xml':
sub = '&#x%s;' % sub[1]
else:
sub = '&%s;' % sub[0]
return sub
def _convertFrom(self, proposed):
proposed = self.find_codec(proposed)
if not proposed or proposed in self.triedEncodings:
return None
self.triedEncodings.append(proposed)
markup = self.markup
# Convert smart quotes to HTML if coming from an encoding
# that might have them.
if self.smartQuotesTo and proposed.lower() in("windows-1252",
"iso-8859-1",
"iso-8859-2"):
markup = re.compile("([\x80-\x9f])").sub \
(lambda(x): self._subMSChar(x.group(1)),
markup)
try:
# print "Trying to convert document to %s" % proposed
u = self._toUnicode(markup, proposed)
self.markup = u
self.originalEncoding = proposed
except Exception, e:
# print "That didn't work!"
# print e
return None
#print "Correct encoding: %s" % proposed
return self.markup
def _toUnicode(self, data, encoding):
'''Given a string and its encoding, decodes the string into Unicode.
%encoding is a string recognized by encodings.aliases'''
# strip Byte Order Mark (if present)
if (len(data) >= 4) and (data[:2] == '\xfe\xff') \
and (data[2:4] != '\x00\x00'):
encoding = 'utf-16be'
data = data[2:]
elif (len(data) >= 4) and (data[:2] == '\xff\xfe') \
and (data[2:4] != '\x00\x00'):
encoding = 'utf-16le'
data = data[2:]
elif data[:3] == '\xef\xbb\xbf':
encoding = 'utf-8'
data = data[3:]
elif data[:4] == '\x00\x00\xfe\xff':
encoding = 'utf-32be'
data = data[4:]
elif data[:4] == '\xff\xfe\x00\x00':
encoding = 'utf-32le'
data = data[4:]
newdata = unicode(data, encoding)
return newdata
def _detectEncoding(self, xml_data, isHTML=False):
"""Given a document, tries to detect its XML encoding."""
xml_encoding = sniffed_xml_encoding = None
try:
if xml_data[:4] == '\x4c\x6f\xa7\x94':
# EBCDIC
xml_data = self._ebcdic_to_ascii(xml_data)
elif xml_data[:4] == '\x00\x3c\x00\x3f':
# UTF-16BE
sniffed_xml_encoding = 'utf-16be'
xml_data = unicode(xml_data, 'utf-16be').encode('utf-8')
elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') \
and (xml_data[2:4] != '\x00\x00'):
# UTF-16BE with BOM
sniffed_xml_encoding = 'utf-16be'
xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8')
elif xml_data[:4] == '\x3c\x00\x3f\x00':
# UTF-16LE
sniffed_xml_encoding = 'utf-16le'
xml_data = unicode(xml_data, 'utf-16le').encode('utf-8')
elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and \
(xml_data[2:4] != '\x00\x00'):
# UTF-16LE with BOM
sniffed_xml_encoding = 'utf-16le'
xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8')
elif xml_data[:4] == '\x00\x00\x00\x3c':
# UTF-32BE
sniffed_xml_encoding = 'utf-32be'
xml_data = unicode(xml_data, 'utf-32be').encode('utf-8')
elif xml_data[:4] == '\x3c\x00\x00\x00':
# UTF-32LE
sniffed_xml_encoding = 'utf-32le'
xml_data = unicode(xml_data, 'utf-32le').encode('utf-8')
elif xml_data[:4] == '\x00\x00\xfe\xff':
# UTF-32BE with BOM
sniffed_xml_encoding = 'utf-32be'
xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8')
elif xml_data[:4] == '\xff\xfe\x00\x00':
# UTF-32LE with BOM
sniffed_xml_encoding = 'utf-32le'
xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8')
elif xml_data[:3] == '\xef\xbb\xbf':
# UTF-8 with BOM
sniffed_xml_encoding = 'utf-8'
xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8')
else:
sniffed_xml_encoding = 'ascii'
pass
except:
xml_encoding_match = None
xml_encoding_match = re.compile(
'^<\?.*encoding=[\'"](.*?)[\'"].*\?>').match(xml_data)
if not xml_encoding_match and isHTML:
regexp = re.compile('<\s*meta[^>]+charset=([^>]*?)[;\'">]', re.I)
xml_encoding_match = regexp.search(xml_data)
if xml_encoding_match is not None:
xml_encoding = xml_encoding_match.groups()[0].lower()
if isHTML:
self.declaredHTMLEncoding = xml_encoding
if sniffed_xml_encoding and \
(xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode',
'iso-10646-ucs-4', 'ucs-4', 'csucs4',
'utf-16', 'utf-32', 'utf_16', 'utf_32',
'utf16', 'u16')):
xml_encoding = sniffed_xml_encoding
return xml_data, xml_encoding, sniffed_xml_encoding
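    # A hedged sketch of what the branches above yield for a few byte
    # prefixes (worked through by hand from the code, not taken from tests):
    #
    #   _detectEncoding('\xff\xfe<\x00?\x00x\x00m\x00l\x00')
    #       -> sniffed_xml_encoding == 'utf-16le' (BOM stripped, re-encoded utf-8)
    #   _detectEncoding('\xef\xbb\xbf<?xml version="1.0"?>')
    #       -> sniffed_xml_encoding == 'utf-8'
    #   _detectEncoding('<?xml encoding="latin-1"?>')
    #       -> sniffed_xml_encoding == 'ascii', xml_encoding == 'latin-1'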
def find_codec(self, charset):
return self._codec(self.CHARSET_ALIASES.get(charset, charset)) \
or (charset and self._codec(charset.replace("-", ""))) \
or (charset and self._codec(charset.replace("-", "_"))) \
or charset
def _codec(self, charset):
if not charset: return charset
codec = None
try:
codecs.lookup(charset)
codec = charset
except (LookupError, ValueError):
pass
return codec
EBCDIC_TO_ASCII_MAP = None
def _ebcdic_to_ascii(self, s):
c = self.__class__
if not c.EBCDIC_TO_ASCII_MAP:
emap = (0,1,2,3,156,9,134,127,151,141,142,11,12,13,14,15,
16,17,18,19,157,133,8,135,24,25,146,143,28,29,30,31,
128,129,130,131,132,10,23,27,136,137,138,139,140,5,6,7,
144,145,22,147,148,149,150,4,152,153,154,155,20,21,158,26,
32,160,161,162,163,164,165,166,167,168,91,46,60,40,43,33,
38,169,170,171,172,173,174,175,176,177,93,36,42,41,59,94,
45,47,178,179,180,181,182,183,184,185,124,44,37,95,62,63,
186,187,188,189,190,191,192,193,194,96,58,35,64,39,61,34,
195,97,98,99,100,101,102,103,104,105,196,197,198,199,200,
201,202,106,107,108,109,110,111,112,113,114,203,204,205,
206,207,208,209,126,115,116,117,118,119,120,121,122,210,
211,212,213,214,215,216,217,218,219,220,221,222,223,224,
225,226,227,228,229,230,231,123,65,66,67,68,69,70,71,72,
73,232,233,234,235,236,237,125,74,75,76,77,78,79,80,81,
82,238,239,240,241,242,243,92,159,83,84,85,86,87,88,89,
90,244,245,246,247,248,249,48,49,50,51,52,53,54,55,56,57,
250,251,252,253,254,255)
import string
c.EBCDIC_TO_ASCII_MAP = string.maketrans( \
''.join(map(chr, range(256))), ''.join(map(chr, emap)))
return s.translate(c.EBCDIC_TO_ASCII_MAP)
MS_CHARS = { '\x80' : ('euro', '20AC'),
'\x81' : ' ',
'\x82' : ('sbquo', '201A'),
'\x83' : ('fnof', '192'),
'\x84' : ('bdquo', '201E'),
'\x85' : ('hellip', '2026'),
'\x86' : ('dagger', '2020'),
'\x87' : ('Dagger', '2021'),
'\x88' : ('circ', '2C6'),
'\x89' : ('permil', '2030'),
'\x8A' : ('Scaron', '160'),
'\x8B' : ('lsaquo', '2039'),
'\x8C' : ('OElig', '152'),
'\x8D' : '?',
'\x8E' : ('#x17D', '17D'),
'\x8F' : '?',
'\x90' : '?',
'\x91' : ('lsquo', '2018'),
'\x92' : ('rsquo', '2019'),
'\x93' : ('ldquo', '201C'),
'\x94' : ('rdquo', '201D'),
'\x95' : ('bull', '2022'),
'\x96' : ('ndash', '2013'),
'\x97' : ('mdash', '2014'),
'\x98' : ('tilde', '2DC'),
'\x99' : ('trade', '2122'),
'\x9a' : ('scaron', '161'),
'\x9b' : ('rsaquo', '203A'),
'\x9c' : ('oelig', '153'),
'\x9d' : '?',
'\x9e' : ('#x17E', '17E'),
'\x9f' : ('Yuml', ''),}
#######################################################################
| agpl-3.0 | 2,712,102,671,511,802,000 | 7,018,538,812,340,006,000 | 42.096654 | 79 | 0.485638 | false |
fly19890211/edx-platform | docs/en_us/platform_api/source/conf.py | 6 | 7106 | # -*- coding: utf-8 -*-
# pylint: disable=invalid-name
# pylint: disable=redefined-builtin
# pylint: disable=protected-access
# pylint: disable=unused-argument
import os
from path import Path as path
import sys
import mock
MOCK_MODULES = [
'lxml',
'requests',
'xblock',
'fields',
'xblock.fields',
    'fragment',
'xblock.fragment',
'webob',
'multidict',
'webob.multidict',
'core',
'xblock.core',
'runtime',
'xblock.runtime',
'sortedcontainers',
'contracts',
'plugin',
'xblock.plugin',
'opaque_keys.edx.asides',
'asides',
'dogstats_wrapper',
'fs',
'fs.errors',
'edxmako',
'edxmako.shortcuts',
'shortcuts',
'crum',
'opaque_keys.edx.locator',
'LibraryLocator',
'Location',
'ipware',
'ip',
'ipware.ip',
'get_ip',
'pygeoip',
'ipaddr',
'django_countries',
'fields',
'django_countries.fields',
'opaque_keys',
'opaque_keys.edx',
'opaque_keys.edx.keys',
'CourseKey',
'UsageKey',
'BlockTypeKey',
'opaque_keys.edx.locations',
'SlashSeparatedCourseKey',
'Locator',
'south',
'modelsinspector',
'south.modelsinspector',
'add_introspection_rules',
'courseware',
'access',
'courseware.access',
'is_mobile_available_for_user',
'courseware.model_data',
'courseware.module_render',
'courseware.views',
'util.request',
'eventtracking',
'xmodule',
'xmodule.exceptions',
'xmodule.modulestore',
'xmodule.modulestore.exceptions',
'xmodule.modulestore.django',
'courseware.models',
'milestones',
'milestones.api',
'milestones.models',
'milestones.exceptions',
'ratelimitbackend',
'analytics',
'courseware.courses',
'staticfiles',
'storage',
'staticfiles.storage',
'content',
'xmodule.contentstore',
'xmodule.contentstore.content',
'xblock.exceptions',
'xmodule.seq_module',
'xmodule.vertical_module',
'xmodule.x_module',
'nltk',
'ratelimitbackend',
'ratelimitbackend.exceptions',
'social',
'social.apps',
'social.apps.django_app',
'social.backends',
'mako',
'exceptions',
'mako.exceptions',
'boto',
'exception',
'boto.exception',
'PIL',
'reportlab',
'lib',
'reportlab.lib',
'pdfgen',
'canvas',
'pdfgen',
'pdfgen.canvas',
'reportlab.pdfgen',
'reportlab.pdfgen.canvas',
'reportlab.lib.pagesizes',
'reportlab.lib.units',
'reportlab.lib.styles',
'reportlab.platypus',
'reportlab.platypus.tables',
'boto',
's3',
'connection',
'boto.s3',
'boto.s3.connection',
'boto.s3.key',
'Crypto',
'Crypto.Cipher',
'Crypto.PublicKey',
'openid',
'store',
'interface',
'openid.store',
'store.interface',
'openid.store.interface',
'external_auth.views',
'html_to_text',
'mail_utils',
'ratelimitbackend.backends',
'social.apps.django_app.default',
'social.exceptions',
'social.pipeline',
'xmodule.error_module',
'accounts.api',
'modulestore.mongo.base',
'xmodule.modulestore.mongo',
'xmodule.modulestore.mongo.base',
'edxval',
'edxval.api',
'model_utils',
'model_utils.models',
'model_utils.managers',
'certificates',
'certificates.models',
'certificates.models.GeneratedCertificate',
'shoppingcart',
'shopppingcart.models',
'shopppingcart.api',
'api',
'student',
'student.views',
'student.forms',
'student.models',
'celery',
'celery.task',
'student.roles',
'embargo.models',
'xmodule.vertical_block',
'vertical_block',
'errors',
'UserNotFound',
'UserNotAuthorized',
'AccountUpdateError',
'AccountValidationError',
'transaction',
'parsers',
'MergePatchParser',
'get_account_settings',
'update_account_settings',
'serializers',
'profile_images.images',
'xmodule.course_module',
'user_api.accounts.api',
'user_api.accounts.serializers',
'ecommerce_api_client',
'client',
'ecommerce_api_client.client',
'ecommerce_api_client.exceptions',
'student.auth',
'ccx_keys',
'ccx_keys.locator',
'user_api.preferences.api'
]
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = mock.Mock(class_that_is_extended=object)
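# The loop above is the usual Sphinx/autodoc trick for building docs when the
# project's runtime dependencies are not installed: each listed name resolves
# to a Mock, so importing it (or subclassing something from it) does not fail.
# A minimal standalone sketch of the same idea -- the module names here are
# purely illustrative, not real packages:
#
#   import sys
#   import mock
#   for name in ('heavy_dep', 'heavy_dep.submodule'):
#       sys.modules[name] = mock.Mock(class_that_is_extended=object)
#   import heavy_dep.submodule   # now succeeds and returns the Mock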
if "DJANGO_SETTINGS_MODULE" not in os.environ:
docs_path = os.getcwd()
mezzanine_path_parts = (docs_path, "..")
sys.path.insert(0, docs_path)
sys.path.insert(0, os.path.realpath(os.path.join(*mezzanine_path_parts)))
os.environ["DJANGO_SETTINGS_MODULE"] = "docs_settings"
# Django 1.7's setup is required before touching translated strings.
import django
try:
django.setup()
except AttributeError: # < 1.7
pass
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
sys.path.append('../../../../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'lms.envs.dev'
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lms.envs.dev")
from docs.shared.conf import *
# Add any paths that contain templates here, relative to this directory.
#templates_path.append('source/_templates')
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path.append('source/_static')
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
root = path('../../../..').abspath()
sys.path.insert(0, root)
sys.path.append(root / "common/lib/xmodule")
sys.path.append(root / "common/djangoapps")
sys.path.append(root / "lms/djangoapps")
sys.path.append(root / "lms/envs")
sys.path.append(root / "openedx/core/djangoapps")
sys.path.insert(
0,
os.path.abspath(
os.path.normpath(
os.path.dirname(__file__) + '/../../../'
)
)
)
sys.path.append('.')
# django configuration - careful here
if on_rtd:
os.environ['DJANGO_SETTINGS_MODULE'] = 'lms'
else:
os.environ['DJANGO_SETTINGS_MODULE'] = 'lms'
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx',
'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath',
'sphinx.ext.mathjax', 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon']
project = u'Open edX Platform APIs'
copyright = u'2015, edX'
exclude_patterns = ['build', 'links.rst']
| agpl-3.0 | -7,067,983,712,304,558,000 | 7,661,006,332,404,854,000 | 24.469534 | 80 | 0.632423 | false |
bjlittle/iris | docs/gallery_code/oceanography/plot_atlantic_profiles.py | 2 | 3317 | """
Oceanographic Profiles and T-S Diagrams
=======================================
This example demonstrates how to plot vertical profiles of different
variables in the same axes, and how to make a scatter plot of two
variables. There is an oceanographic theme but the same techniques are
equally applicable to atmospheric or other kinds of data.
The data used are profiles of potential temperature and salinity in the
Equatorial and South Atlantic, output from an ocean model.
The y-axis of the first plot produced will be automatically inverted due to the
presence of the attribute positive=down on the depth coordinate. This means
depth values intuitively increase downward on the y-axis.
"""
import matplotlib.pyplot as plt
import iris
import iris.iterate
import iris.plot as iplt
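# Note on the automatic y-axis inversion described in the module docstring: it
# relies on the depth coordinate carrying the CF attribute positive='down'.
# A hedged snippet for checking (or restoring) that attribute on a loaded cube
# such as theta in main() below, assuming the usual iris coord.attributes dict
# is where it lives:
#
#   depth = theta.coord('depth')
#   if depth.attributes.get('positive') != 'down':
#       depth.attributes['positive'] = 'down'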
def main():
# Load the gridded temperature and salinity data.
fname = iris.sample_data_path("atlantic_profiles.nc")
cubes = iris.load(fname)
(theta,) = cubes.extract("sea_water_potential_temperature")
(salinity,) = cubes.extract("sea_water_practical_salinity")
# Extract profiles of temperature and salinity from a particular point in
# the southern portion of the domain, and limit the depth of the profile
# to 1000m.
lon_cons = iris.Constraint(longitude=330.5)
lat_cons = iris.Constraint(latitude=lambda l: -10 < l < -9)
depth_cons = iris.Constraint(depth=lambda d: d <= 1000)
theta_1000m = theta.extract(depth_cons & lon_cons & lat_cons)
salinity_1000m = salinity.extract(depth_cons & lon_cons & lat_cons)
# Plot these profiles on the same set of axes. Depth is automatically
# recognised as a vertical coordinate and placed on the y-axis.
# The first plot is in the default axes. We'll use the same color for the
# curve and its axes/tick labels.
plt.figure(figsize=(5, 6))
temperature_color = (0.3, 0.4, 0.5)
ax1 = plt.gca()
iplt.plot(
theta_1000m,
linewidth=2,
color=temperature_color,
alpha=0.75,
)
ax1.set_xlabel("Potential Temperature / K", color=temperature_color)
ax1.set_ylabel("Depth / m")
for ticklabel in ax1.get_xticklabels():
ticklabel.set_color(temperature_color)
# To plot salinity in the same axes we use twiny(). We'll use a different
# color to identify salinity.
salinity_color = (0.6, 0.1, 0.15)
ax2 = plt.gca().twiny()
iplt.plot(
salinity_1000m,
linewidth=2,
color=salinity_color,
alpha=0.75,
)
ax2.set_xlabel("Salinity / PSU", color=salinity_color)
for ticklabel in ax2.get_xticklabels():
ticklabel.set_color(salinity_color)
plt.tight_layout()
iplt.show()
# Now plot a T-S diagram using scatter. We'll use all the profiles here,
# and each point will be coloured according to its depth.
plt.figure(figsize=(6, 6))
depth_values = theta.coord("depth").points
for s, t in iris.iterate.izip(salinity, theta, coords="depth"):
iplt.scatter(s, t, c=depth_values, marker="+", cmap="RdYlBu_r")
ax = plt.gca()
ax.set_xlabel("Salinity / PSU")
ax.set_ylabel("Potential Temperature / K")
cb = plt.colorbar(orientation="horizontal")
cb.set_label("Depth / m")
plt.tight_layout()
iplt.show()
if __name__ == "__main__":
main()
| lgpl-3.0 | 2,971,815,230,170,250,000 | -3,491,215,907,460,557,000 | 35.054348 | 79 | 0.676515 | false |
davidjb/sqlalchemy | test/engine/test_reflection.py | 21 | 59908 | import operator
import unicodedata
import sqlalchemy as sa
from sqlalchemy import schema, events, event, inspect
from sqlalchemy import MetaData, Integer, String
from sqlalchemy.testing import (ComparesTables, engines, AssertsCompiledSQL,
fixtures, skip)
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
from sqlalchemy import testing
from sqlalchemy.util import ue
metadata, users = None, None
class ReflectionTest(fixtures.TestBase, ComparesTables):
__backend__ = True
@testing.exclude('mssql', '<', (10, 0, 0),
'Date is only supported on MSSQL 2008+')
@testing.exclude('mysql', '<', (4, 1, 1),
'early types are squirrely')
@testing.provide_metadata
def test_basic_reflection(self):
meta = self.metadata
users = Table('engine_users', meta,
Column('user_id', sa.INT, primary_key=True),
Column('user_name', sa.VARCHAR(20), nullable=False),
Column('test1', sa.CHAR(5), nullable=False),
Column('test2', sa.Float(5), nullable=False),
Column('test3', sa.Text),
Column('test4', sa.Numeric(10, 2), nullable=False),
Column('test5', sa.Date),
Column('parent_user_id', sa.Integer,
sa.ForeignKey('engine_users.user_id')),
Column('test6', sa.Date, nullable=False),
Column('test7', sa.Text),
Column('test8', sa.LargeBinary),
Column('test_passivedefault2', sa.Integer, server_default='5'),
Column('test9', sa.LargeBinary(100)),
Column('test10', sa.Numeric(10, 2)),
test_needs_fk=True,
)
addresses = Table(
'engine_email_addresses',
meta,
Column('address_id', sa.Integer, primary_key=True),
Column('remote_user_id', sa.Integer,
sa.ForeignKey(users.c.user_id)),
Column('email_address', sa.String(20)),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
reflected_users = Table('engine_users', meta2,
autoload=True,
autoload_with=testing.db)
reflected_addresses = Table('engine_email_addresses',
meta2, autoload=True, autoload_with=testing.db)
self.assert_tables_equal(users, reflected_users)
self.assert_tables_equal(addresses, reflected_addresses)
@testing.provide_metadata
def test_autoload_with_imply_autoload(self,):
meta = self.metadata
t = Table(
't',
meta,
Column('id', sa.Integer, primary_key=True),
Column('x', sa.String(20)),
Column('y', sa.Integer))
meta.create_all()
meta2 = MetaData()
reflected_t = Table('t', meta2,
autoload_with=testing.db)
self.assert_tables_equal(t, reflected_t)
@testing.provide_metadata
def test_two_foreign_keys(self):
meta = self.metadata
Table(
't1',
meta,
Column('id', sa.Integer, primary_key=True),
Column('t2id', sa.Integer, sa.ForeignKey('t2.id')),
Column('t3id', sa.Integer, sa.ForeignKey('t3.id')),
test_needs_fk=True,
)
Table('t2', meta,
Column('id', sa.Integer, primary_key=True),
test_needs_fk=True)
Table('t3', meta,
Column('id', sa.Integer, primary_key=True),
test_needs_fk=True)
meta.create_all()
meta2 = MetaData()
t1r, t2r, t3r = [Table(x, meta2, autoload=True,
autoload_with=testing.db) for x in ('t1',
't2', 't3')]
assert t1r.c.t2id.references(t2r.c.id)
assert t1r.c.t3id.references(t3r.c.id)
def test_nonexistent(self):
meta = MetaData(testing.db)
assert_raises(sa.exc.NoSuchTableError, Table, 'nonexistent',
meta, autoload=True)
assert 'nonexistent' not in meta.tables
@testing.provide_metadata
def test_include_columns(self):
meta = self.metadata
foo = Table('foo', meta, *[Column(n, sa.String(30))
for n in ['a', 'b', 'c', 'd', 'e', 'f']])
meta.create_all()
meta2 = MetaData(testing.db)
foo = Table('foo', meta2, autoload=True,
include_columns=['b', 'f', 'e'])
# test that cols come back in original order
eq_([c.name for c in foo.c], ['b', 'e', 'f'])
for c in ('b', 'f', 'e'):
assert c in foo.c
for c in ('a', 'c', 'd'):
assert c not in foo.c
# test against a table which is already reflected
meta3 = MetaData(testing.db)
foo = Table('foo', meta3, autoload=True)
foo = Table('foo', meta3, include_columns=['b', 'f', 'e'],
extend_existing=True)
eq_([c.name for c in foo.c], ['b', 'e', 'f'])
for c in ('b', 'f', 'e'):
assert c in foo.c
for c in ('a', 'c', 'd'):
assert c not in foo.c
@testing.provide_metadata
def test_extend_existing(self):
meta = self.metadata
Table('t', meta,
Column('id', Integer, primary_key=True),
Column('x', Integer),
Column('y', Integer),
Column('z', Integer, server_default="5"),
)
meta.create_all()
m2 = MetaData()
old_z = Column('z', String, primary_key=True)
old_y = Column('y', String)
old_q = Column('q', Integer)
t2 = Table('t', m2, old_z, old_q)
eq_(t2.primary_key.columns, (t2.c.z, ))
t2 = Table('t', m2, old_y,
extend_existing=True,
autoload=True,
autoload_with=testing.db)
eq_(
set(t2.columns.keys()),
set(['x', 'y', 'z', 'q', 'id'])
)
eq_(t2.primary_key.columns, (t2.c.id, ))
assert t2.c.z is not old_z
assert t2.c.y is old_y
assert t2.c.z.type._type_affinity is Integer
assert t2.c.q is old_q
m3 = MetaData()
t3 = Table('t', m3, Column('z', Integer))
t3 = Table('t', m3, extend_existing=False,
autoload=True,
autoload_with=testing.db)
eq_(
set(t3.columns.keys()),
set(['z'])
)
m4 = MetaData()
old_z = Column('z', String, primary_key=True)
old_y = Column('y', String)
old_q = Column('q', Integer)
t4 = Table('t', m4, old_z, old_q)
eq_(t4.primary_key.columns, (t4.c.z, ))
t4 = Table('t', m4, old_y,
extend_existing=True,
autoload=True,
autoload_replace=False,
autoload_with=testing.db)
eq_(
set(t4.columns.keys()),
set(['x', 'y', 'z', 'q', 'id'])
)
eq_(t4.primary_key.columns, (t4.c.id, ))
assert t4.c.z is old_z
assert t4.c.y is old_y
assert t4.c.z.type._type_affinity is String
assert t4.c.q is old_q
@testing.emits_warning(r".*omitted columns")
@testing.provide_metadata
def test_include_columns_indexes(self):
m = self.metadata
t1 = Table('t1', m, Column('a', sa.Integer), Column('b', sa.Integer))
sa.Index('foobar', t1.c.a, t1.c.b)
sa.Index('bat', t1.c.a)
m.create_all()
m2 = MetaData(testing.db)
t2 = Table('t1', m2, autoload=True)
assert len(t2.indexes) == 2
m2 = MetaData(testing.db)
t2 = Table('t1', m2, autoload=True, include_columns=['a'])
assert len(t2.indexes) == 1
m2 = MetaData(testing.db)
t2 = Table('t1', m2, autoload=True, include_columns=['a', 'b'])
assert len(t2.indexes) == 2
@testing.provide_metadata
def test_autoload_replace_foreign_key_nonpresent(self):
"""test autoload_replace=False with col plus FK
establishes the FK not present in the DB.
"""
Table('a', self.metadata, Column('id', Integer, primary_key=True))
Table('b', self.metadata, Column('id', Integer, primary_key=True),
Column('a_id', Integer))
self.metadata.create_all()
m2 = MetaData()
b2 = Table('b', m2, Column('a_id', Integer, sa.ForeignKey('a.id')))
a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
b2 = Table('b', m2, extend_existing=True, autoload=True,
autoload_with=testing.db,
autoload_replace=False)
assert b2.c.id is not None
assert b2.c.a_id.references(a2.c.id)
eq_(len(b2.constraints), 2)
@testing.provide_metadata
def test_autoload_replace_foreign_key_ispresent(self):
"""test autoload_replace=False with col plus FK mirroring
DB-reflected FK skips the reflected FK and installs
the in-python one only.
"""
Table('a', self.metadata, Column('id', Integer, primary_key=True))
Table('b', self.metadata, Column('id', Integer, primary_key=True),
Column('a_id', Integer, sa.ForeignKey('a.id')))
self.metadata.create_all()
m2 = MetaData()
b2 = Table('b', m2, Column('a_id', Integer, sa.ForeignKey('a.id')))
a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
b2 = Table('b', m2, extend_existing=True, autoload=True,
autoload_with=testing.db,
autoload_replace=False)
assert b2.c.id is not None
assert b2.c.a_id.references(a2.c.id)
eq_(len(b2.constraints), 2)
@testing.provide_metadata
def test_autoload_replace_foreign_key_removed(self):
"""test autoload_replace=False with col minus FK that's in the
DB means the FK is skipped and doesn't get installed at all.
"""
Table('a', self.metadata, Column('id', Integer, primary_key=True))
Table('b', self.metadata, Column('id', Integer, primary_key=True),
Column('a_id', Integer, sa.ForeignKey('a.id')))
self.metadata.create_all()
m2 = MetaData()
b2 = Table('b', m2, Column('a_id', Integer))
a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
b2 = Table('b', m2, extend_existing=True, autoload=True,
autoload_with=testing.db,
autoload_replace=False)
assert b2.c.id is not None
assert not b2.c.a_id.references(a2.c.id)
eq_(len(b2.constraints), 1)
@testing.provide_metadata
def test_autoload_replace_primary_key(self):
Table('a', self.metadata, Column('id', Integer))
self.metadata.create_all()
m2 = MetaData()
a2 = Table('a', m2, Column('id', Integer, primary_key=True))
Table('a', m2, autoload=True, autoload_with=testing.db,
autoload_replace=False, extend_existing=True)
eq_(list(a2.primary_key), [a2.c.id])
def test_autoload_replace_arg(self):
Table('t', MetaData(), autoload_replace=False)
@testing.provide_metadata
def test_autoincrement_col(self):
"""test that 'autoincrement' is reflected according to sqla's policy.
        Don't mark this test as unsupported for any backend!
(technically it fails with MySQL InnoDB since "id" comes before "id2")
"""
meta = self.metadata
Table('test', meta,
Column('id', sa.Integer, primary_key=True),
Column('data', sa.String(50)),
mysql_engine='MyISAM'
)
Table('test2', meta,
Column('id', sa.Integer, sa.ForeignKey('test.id'),
primary_key=True),
Column('id2', sa.Integer, primary_key=True),
Column('data', sa.String(50)),
mysql_engine='MyISAM'
)
meta.create_all()
m2 = MetaData(testing.db)
t1a = Table('test', m2, autoload=True)
assert t1a._autoincrement_column is t1a.c.id
t2a = Table('test2', m2, autoload=True)
assert t2a._autoincrement_column is t2a.c.id2
@skip('sqlite')
@testing.provide_metadata
def test_unknown_types(self):
"""Test the handling of unknown types for the given dialect.
sqlite is skipped because it has special rules for unknown types using
'affinity types' - this feature is tested in that dialect's test spec.
"""
meta = self.metadata
t = Table("test", meta,
Column('foo', sa.DateTime))
ischema_names = testing.db.dialect.ischema_names
t.create()
testing.db.dialect.ischema_names = {}
try:
m2 = MetaData(testing.db)
assert_raises(sa.exc.SAWarning, Table, "test", m2, autoload=True)
@testing.emits_warning('Did not recognize type')
def warns():
m3 = MetaData(testing.db)
t3 = Table("test", m3, autoload=True)
assert t3.c.foo.type.__class__ == sa.types.NullType
finally:
testing.db.dialect.ischema_names = ischema_names
@testing.provide_metadata
def test_basic_override(self):
meta = self.metadata
table = Table(
'override_test', meta,
Column('col1', sa.Integer, primary_key=True),
Column('col2', sa.String(20)),
Column('col3', sa.Numeric)
)
table.create()
meta2 = MetaData(testing.db)
table = Table(
'override_test', meta2,
Column('col2', sa.Unicode()),
Column('col4', sa.String(30)), autoload=True)
self.assert_(isinstance(table.c.col1.type, sa.Integer))
self.assert_(isinstance(table.c.col2.type, sa.Unicode))
self.assert_(isinstance(table.c.col4.type, sa.String))
@testing.provide_metadata
def test_override_upgrade_pk_flag(self):
meta = self.metadata
table = Table(
'override_test', meta,
Column('col1', sa.Integer),
Column('col2', sa.String(20)),
Column('col3', sa.Numeric)
)
table.create()
meta2 = MetaData(testing.db)
table = Table(
'override_test', meta2,
Column('col1', sa.Integer, primary_key=True),
autoload=True)
eq_(list(table.primary_key), [table.c.col1])
eq_(table.c.col1.primary_key, True)
@testing.provide_metadata
def test_override_pkfk(self):
"""test that you can override columns which contain foreign keys
to other reflected tables, where the foreign key column is also
a primary key column"""
meta = self.metadata
Table('users', meta,
Column('id', sa.Integer, primary_key=True),
Column('name', sa.String(30)))
Table('addresses', meta,
Column('id', sa.Integer, primary_key=True),
Column('street', sa.String(30)))
meta.create_all()
meta2 = MetaData(testing.db)
a2 = Table('addresses', meta2,
Column('id', sa.Integer,
sa.ForeignKey('users.id'), primary_key=True),
autoload=True)
u2 = Table('users', meta2, autoload=True)
assert list(a2.primary_key) == [a2.c.id]
assert list(u2.primary_key) == [u2.c.id]
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.id)
meta3 = MetaData(testing.db)
u3 = Table('users', meta3, autoload=True)
a3 = Table('addresses', meta3,
Column('id', sa.Integer, sa.ForeignKey('users.id'),
primary_key=True),
autoload=True)
assert list(a3.primary_key) == [a3.c.id]
assert list(u3.primary_key) == [u3.c.id]
assert u3.join(a3).onclause.compare(u3.c.id == a3.c.id)
@testing.provide_metadata
def test_override_nonexistent_fk(self):
"""test that you can override columns and create new foreign
        keys to other reflected tables which have no foreign keys. This
        is common with MySQL MyISAM tables."""
meta = self.metadata
Table('users', meta,
Column('id', sa.Integer, primary_key=True),
Column('name', sa.String(30)))
Table('addresses', meta,
Column('id', sa.Integer, primary_key=True),
Column('street', sa.String(30)),
Column('user_id', sa.Integer))
meta.create_all()
meta2 = MetaData(testing.db)
a2 = Table('addresses', meta2,
Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
autoload=True)
u2 = Table('users', meta2, autoload=True)
assert len(a2.c.user_id.foreign_keys) == 1
assert len(a2.foreign_keys) == 1
assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
assert [c.parent for c in a2.c.user_id.foreign_keys] \
== [a2.c.user_id]
assert list(a2.c.user_id.foreign_keys)[0].parent \
is a2.c.user_id
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
meta3 = MetaData(testing.db)
u3 = Table('users', meta3, autoload=True)
a3 = Table('addresses', meta3, Column('user_id',
sa.Integer, sa.ForeignKey('users.id')),
autoload=True)
assert u3.join(a3).onclause.compare(u3.c.id == a3.c.user_id)
meta4 = MetaData(testing.db)
u4 = Table('users', meta4,
Column('id', sa.Integer, key='u_id', primary_key=True),
autoload=True)
a4 = Table(
'addresses',
meta4,
Column('id', sa.Integer, key='street',
primary_key=True),
Column('street', sa.String(30), key='user_id'),
Column('user_id', sa.Integer, sa.ForeignKey('users.u_id'
), key='id'),
autoload=True,
)
assert u4.join(a4).onclause.compare(u4.c.u_id == a4.c.id)
assert list(u4.primary_key) == [u4.c.u_id]
assert len(u4.columns) == 2
assert len(u4.constraints) == 1
assert len(a4.columns) == 3
assert len(a4.constraints) == 2
@testing.provide_metadata
def test_override_composite_fk(self):
"""Test double-remove of composite foreign key, when replaced."""
metadata = self.metadata
Table('a',
metadata,
Column('x', sa.Integer, primary_key=True),
Column('y', sa.Integer, primary_key=True),
)
Table('b',
metadata,
Column('x', sa.Integer, primary_key=True),
Column('y', sa.Integer, primary_key=True),
sa.ForeignKeyConstraint(['x', 'y'], ['a.x', 'a.y'])
)
metadata.create_all()
meta2 = MetaData()
c1 = Column('x', sa.Integer, primary_key=True)
c2 = Column('y', sa.Integer, primary_key=True)
f1 = sa.ForeignKeyConstraint(['x', 'y'], ['a.x', 'a.y'])
b1 = Table('b',
meta2, c1, c2, f1,
autoload=True,
autoload_with=testing.db
)
assert b1.c.x is c1
assert b1.c.y is c2
assert f1 in b1.constraints
assert len(b1.constraints) == 2
@testing.provide_metadata
def test_override_keys(self):
"""test that columns can be overridden with a 'key',
and that ForeignKey targeting during reflection still works."""
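        # Minimal sketch of the same override in application code (names are
        # illustrative; 'engine' is an assumed Engine):
        #
        #   a = Table('a', MetaData(),
        #             Column('x', sa.Integer, primary_key=True, key='x1'),
        #             autoload_with=engine)
        #   a.c.x1   # the reflected column 'x', addressed via its Python key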
meta = self.metadata
Table('a', meta,
Column('x', sa.Integer, primary_key=True),
Column('z', sa.Integer),
test_needs_fk=True
)
Table('b', meta,
Column('y', sa.Integer, sa.ForeignKey('a.x')),
test_needs_fk=True
)
meta.create_all()
m2 = MetaData(testing.db)
a2 = Table('a', m2,
Column('x', sa.Integer, primary_key=True, key='x1'),
autoload=True)
b2 = Table('b', m2, autoload=True)
assert a2.join(b2).onclause.compare(a2.c.x1 == b2.c.y)
assert b2.c.y.references(a2.c.x1)
@testing.provide_metadata
def test_nonreflected_fk_raises(self):
"""test that a NoReferencedColumnError is raised when reflecting
a table with an FK to another table which has not included the target
column in its reflection.
"""
meta = self.metadata
Table('a', meta,
Column('x', sa.Integer, primary_key=True),
Column('z', sa.Integer),
test_needs_fk=True
)
Table('b', meta,
Column('y', sa.Integer, sa.ForeignKey('a.x')),
test_needs_fk=True
)
meta.create_all()
m2 = MetaData(testing.db)
a2 = Table('a', m2, include_columns=['z'], autoload=True)
b2 = Table('b', m2, autoload=True)
assert_raises(sa.exc.NoReferencedColumnError, a2.join, b2)
@testing.exclude('mysql', '<', (4, 1, 1), 'innodb funkiness')
@testing.provide_metadata
def test_override_existing_fk(self):
"""test that you can override columns and specify new foreign
keys to other reflected tables, on columns which *do* already
have that foreign key, and that the FK is not duped. """
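        # Put differently (hedged sketch, 'engine' assumed): redeclaring an FK
        # that already exists in the database is safe --
        #
        #   addresses = Table('addresses', MetaData(),
        #                     Column('user_id', sa.Integer,
        #                            sa.ForeignKey('users.id')),
        #                     autoload_with=engine)
        #
        # should still leave exactly one ForeignKey on addresses.c.user_id,
        # which is what the assertions below check.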
meta = self.metadata
Table('users', meta,
Column('id', sa.Integer, primary_key=True),
Column('name', sa.String(30)),
test_needs_fk=True)
Table('addresses', meta,
Column('id', sa.Integer, primary_key=True),
Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
test_needs_fk=True)
meta.create_all()
meta2 = MetaData(testing.db)
a2 = Table('addresses', meta2,
Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
autoload=True)
u2 = Table('users', meta2, autoload=True)
s = sa.select([a2])
assert s.c.user_id is not None
assert len(a2.foreign_keys) == 1
assert len(a2.c.user_id.foreign_keys) == 1
assert len(a2.constraints) == 2
assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
assert [c.parent for c in a2.c.user_id.foreign_keys] \
== [a2.c.user_id]
assert list(a2.c.user_id.foreign_keys)[0].parent \
is a2.c.user_id
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
meta2 = MetaData(testing.db)
u2 = Table('users', meta2, Column('id', sa.Integer,
primary_key=True), autoload=True)
a2 = Table('addresses', meta2, Column('id', sa.Integer,
primary_key=True), Column('user_id', sa.Integer,
sa.ForeignKey('users.id')), autoload=True)
s = sa.select([a2])
assert s.c.user_id is not None
assert len(a2.foreign_keys) == 1
assert len(a2.c.user_id.foreign_keys) == 1
assert len(a2.constraints) == 2
assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
assert [c.parent for c in a2.c.user_id.foreign_keys] \
== [a2.c.user_id]
assert list(a2.c.user_id.foreign_keys)[0].parent \
is a2.c.user_id
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
@testing.only_on(['postgresql', 'mysql'])
@testing.provide_metadata
def test_fk_options(self):
"""test that foreign key reflection includes options (on
backends with {dialect}.get_foreign_keys() support)"""
if testing.against('postgresql'):
test_attrs = ('match', 'onupdate', 'ondelete',
'deferrable', 'initially')
addresses_user_id_fkey = sa.ForeignKey(
# Each option is specifically not a Postgres default, or
# it won't be returned by PG's inspection
'users.id',
name='addresses_user_id_fkey',
match='FULL',
onupdate='RESTRICT',
ondelete='RESTRICT',
deferrable=True,
initially='DEFERRED'
)
elif testing.against('mysql'):
# MATCH, DEFERRABLE, and INITIALLY cannot be defined for MySQL
# ON UPDATE and ON DELETE have defaults of RESTRICT, which are
# elided by MySQL's inspection
addresses_user_id_fkey = sa.ForeignKey(
'users.id',
name='addresses_user_id_fkey',
onupdate='CASCADE',
ondelete='CASCADE'
)
test_attrs = ('onupdate', 'ondelete')
meta = self.metadata
Table('users', meta,
Column('id', sa.Integer, primary_key=True),
Column('name', sa.String(30)),
test_needs_fk=True)
Table('addresses', meta,
Column('id', sa.Integer, primary_key=True),
Column('user_id', sa.Integer, addresses_user_id_fkey),
test_needs_fk=True)
meta.create_all()
meta2 = MetaData()
meta2.reflect(testing.db)
for fk in meta2.tables['addresses'].foreign_keys:
ref = addresses_user_id_fkey
for attr in test_attrs:
eq_(getattr(fk, attr), getattr(ref, attr))
def test_pks_not_uniques(self):
"""test that primary key reflection not tripped up by unique
indexes"""
testing.db.execute("""
CREATE TABLE book (
id INTEGER NOT NULL,
title VARCHAR(100) NOT NULL,
series INTEGER,
series_id INTEGER,
UNIQUE(series, series_id),
PRIMARY KEY(id)
)""")
try:
metadata = MetaData(bind=testing.db)
book = Table('book', metadata, autoload=True)
assert book.primary_key.contains_column(book.c.id)
assert not book.primary_key.contains_column(book.c.series)
assert len(book.primary_key) == 1
finally:
testing.db.execute("drop table book")
def test_fk_error(self):
metadata = MetaData(testing.db)
Table('slots', metadata,
Column('slot_id', sa.Integer, primary_key=True),
Column('pkg_id', sa.Integer, sa.ForeignKey('pkgs.pkg_id')),
Column('slot', sa.String(128)),
)
assert_raises_message(
sa.exc.InvalidRequestError,
"Foreign key associated with column 'slots.pkg_id' "
"could not find table 'pkgs' with which to generate "
"a foreign key to target column 'pkg_id'",
metadata.create_all)
def test_composite_pks(self):
"""test reflection of a composite primary key"""
testing.db.execute("""
CREATE TABLE book (
id INTEGER NOT NULL,
isbn VARCHAR(50) NOT NULL,
title VARCHAR(100) NOT NULL,
series INTEGER NOT NULL,
series_id INTEGER NOT NULL,
UNIQUE(series, series_id),
PRIMARY KEY(id, isbn)
)""")
try:
metadata = MetaData(bind=testing.db)
book = Table('book', metadata, autoload=True)
assert book.primary_key.contains_column(book.c.id)
assert book.primary_key.contains_column(book.c.isbn)
assert not book.primary_key.contains_column(book.c.series)
assert len(book.primary_key) == 2
finally:
testing.db.execute("drop table book")
@testing.exclude('mysql', '<', (4, 1, 1), 'innodb funkiness')
@testing.provide_metadata
def test_composite_fk(self):
"""test reflection of composite foreign keys"""
meta = self.metadata
multi = Table(
'multi', meta,
Column('multi_id', sa.Integer, primary_key=True),
Column('multi_rev', sa.Integer, primary_key=True),
Column('multi_hoho', sa.Integer, primary_key=True),
Column('name', sa.String(50), nullable=False),
Column('val', sa.String(100)),
test_needs_fk=True,
)
multi2 = Table('multi2', meta,
Column('id', sa.Integer, primary_key=True),
Column('foo', sa.Integer),
Column('bar', sa.Integer),
Column('lala', sa.Integer),
Column('data', sa.String(50)),
sa.ForeignKeyConstraint(['foo', 'bar', 'lala'],
['multi.multi_id', 'multi.multi_rev', 'multi.multi_hoho'
]),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
table = Table('multi', meta2, autoload=True,
autoload_with=testing.db)
table2 = Table('multi2', meta2, autoload=True,
autoload_with=testing.db)
self.assert_tables_equal(multi, table)
self.assert_tables_equal(multi2, table2)
j = sa.join(table, table2)
self.assert_(sa.and_(table.c.multi_id == table2.c.foo,
table.c.multi_rev == table2.c.bar,
table.c.multi_hoho
== table2.c.lala).compare(j.onclause))
@testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on')
@testing.requires.check_constraints
@testing.provide_metadata
def test_reserved(self):
# check a table that uses an SQL reserved name doesn't cause an
# error
meta = self.metadata
table_a = Table('select', meta, Column('not', sa.Integer,
primary_key=True), Column('from',
sa.String(12), nullable=False),
sa.UniqueConstraint('from', name='when'))
sa.Index('where', table_a.c['from'])
# There's currently no way to calculate identifier case
# normalization in isolation, so...
if testing.against('firebird', 'oracle'):
check_col = 'TRUE'
else:
check_col = 'true'
quoter = meta.bind.dialect.identifier_preparer.quote_identifier
Table('false', meta,
Column('create', sa.Integer, primary_key=True),
Column('true', sa.Integer, sa.ForeignKey('select.not')),
sa.CheckConstraint('%s <> 1'
% quoter(check_col), name='limit')
)
table_c = Table('is', meta,
Column('or', sa.Integer, nullable=False, primary_key=True),
Column('join', sa.Integer, nullable=False, primary_key=True),
sa.PrimaryKeyConstraint('or', 'join', name='to')
)
index_c = sa.Index('else', table_c.c.join)
meta.create_all()
index_c.drop()
meta2 = MetaData(testing.db)
Table('select', meta2, autoload=True)
Table('false', meta2, autoload=True)
Table('is', meta2, autoload=True)
@testing.provide_metadata
def _test_reflect_uses_bind(self, fn):
from sqlalchemy.pool import AssertionPool
e = engines.testing_engine(options={"poolclass": AssertionPool})
fn(e)
@testing.uses_deprecated()
def test_reflect_uses_bind_constructor_conn(self):
self._test_reflect_uses_bind(lambda e: MetaData(e.connect(),
reflect=True))
@testing.uses_deprecated()
def test_reflect_uses_bind_constructor_engine(self):
self._test_reflect_uses_bind(lambda e: MetaData(e, reflect=True))
def test_reflect_uses_bind_constructor_conn_reflect(self):
self._test_reflect_uses_bind(lambda e: MetaData(e.connect()).reflect())
def test_reflect_uses_bind_constructor_engine_reflect(self):
self._test_reflect_uses_bind(lambda e: MetaData(e).reflect())
def test_reflect_uses_bind_conn_reflect(self):
self._test_reflect_uses_bind(lambda e: MetaData().reflect(e.connect()))
def test_reflect_uses_bind_engine_reflect(self):
self._test_reflect_uses_bind(lambda e: MetaData().reflect(e))
@testing.provide_metadata
def test_reflect_all(self):
existing = testing.db.table_names()
names = ['rt_%s' % name for name in ('a', 'b', 'c', 'd', 'e')]
nameset = set(names)
for name in names:
# be sure our starting environment is sane
self.assert_(name not in existing)
self.assert_('rt_f' not in existing)
baseline = self.metadata
for name in names:
Table(name, baseline, Column('id', sa.Integer, primary_key=True))
baseline.create_all()
m1 = MetaData(testing.db)
self.assert_(not m1.tables)
m1.reflect()
self.assert_(nameset.issubset(set(m1.tables.keys())))
m2 = MetaData()
m2.reflect(testing.db, only=['rt_a', 'rt_b'])
self.assert_(set(m2.tables.keys()) == set(['rt_a', 'rt_b']))
m3 = MetaData()
c = testing.db.connect()
m3.reflect(bind=c, only=lambda name, meta: name == 'rt_c')
self.assert_(set(m3.tables.keys()) == set(['rt_c']))
m4 = MetaData(testing.db)
try:
m4.reflect(only=['rt_a', 'rt_f'])
self.assert_(False)
except sa.exc.InvalidRequestError as e:
self.assert_(e.args[0].endswith('(rt_f)'))
m5 = MetaData(testing.db)
m5.reflect(only=[])
self.assert_(not m5.tables)
m6 = MetaData(testing.db)
m6.reflect(only=lambda n, m: False)
self.assert_(not m6.tables)
m7 = MetaData(testing.db)
m7.reflect()
self.assert_(nameset.issubset(set(m7.tables.keys())))
m8 = MetaData()
assert_raises(
sa.exc.UnboundExecutionError,
m8.reflect
)
m8_e1 = MetaData(testing.db)
rt_c = Table('rt_c', m8_e1)
m8_e1.reflect(extend_existing=True)
eq_(set(m8_e1.tables.keys()), set(names))
eq_(rt_c.c.keys(), ['id'])
m8_e2 = MetaData(testing.db)
rt_c = Table('rt_c', m8_e2)
m8_e2.reflect(extend_existing=True, only=['rt_a', 'rt_c'])
eq_(set(m8_e2.tables.keys()), set(['rt_a', 'rt_c']))
eq_(rt_c.c.keys(), ['id'])
if existing:
print("Other tables present in database, skipping some checks.")
else:
baseline.drop_all()
m9 = MetaData(testing.db)
m9.reflect()
self.assert_(not m9.tables)
def test_reflect_all_conn_closing(self):
m1 = MetaData()
c = testing.db.connect()
m1.reflect(bind=c)
assert not c.closed
def test_inspector_conn_closing(self):
c = testing.db.connect()
inspect(c)
assert not c.closed
@testing.provide_metadata
def test_index_reflection(self):
m1 = self.metadata
t1 = Table('party', m1,
Column('id', sa.Integer, nullable=False),
Column('name', sa.String(20), index=True)
)
sa.Index('idx1', t1.c.id, unique=True)
sa.Index('idx2', t1.c.name, t1.c.id, unique=False)
m1.create_all()
m2 = MetaData(testing.db)
t2 = Table('party', m2, autoload=True)
assert len(t2.indexes) == 3
# Make sure indexes are in the order we expect them in
tmp = [(idx.name, idx) for idx in t2.indexes]
tmp.sort()
r1, r2, r3 = [idx[1] for idx in tmp]
assert r1.name == 'idx1'
assert r2.name == 'idx2'
assert r1.unique == True
assert r2.unique == False
assert r3.unique == False
assert set([t2.c.id]) == set(r1.columns)
assert set([t2.c.name, t2.c.id]) == set(r2.columns)
assert set([t2.c.name]) == set(r3.columns)
@testing.provide_metadata
def test_index_reflection_cols_busted(self):
t = Table('x', self.metadata,
Column('a', Integer), Column('b', Integer))
sa.Index('x_ix', t.c.a, t.c.b)
self.metadata.create_all()
def mock_get_columns(self, connection, table_name, **kw):
return [
{"name": "b", "type": Integer, "primary_key": False}
]
with testing.mock.patch.object(
testing.db.dialect, "get_columns", mock_get_columns):
m = MetaData()
with testing.expect_warnings(
"index key 'a' was not located in columns"):
t = Table('x', m, autoload=True, autoload_with=testing.db)
eq_(list(t.indexes)[0].columns, [t.c.b])
@testing.requires.views
@testing.provide_metadata
def test_views(self):
metadata = self.metadata
users, addresses, dingalings = createTables(metadata)
try:
metadata.create_all()
_create_views(metadata.bind, None)
m2 = MetaData(testing.db)
users_v = Table("users_v", m2, autoload=True)
addresses_v = Table("email_addresses_v", m2, autoload=True)
for c1, c2 in zip(users_v.c, users.c):
eq_(c1.name, c2.name)
self.assert_types_base(c1, c2)
for c1, c2 in zip(addresses_v.c, addresses.c):
eq_(c1.name, c2.name)
self.assert_types_base(c1, c2)
finally:
_drop_views(metadata.bind)
@testing.requires.views
@testing.provide_metadata
def test_reflect_all_with_views(self):
metadata = self.metadata
users, addresses, dingalings = createTables(metadata, None)
try:
metadata.create_all()
_create_views(metadata.bind, None)
m2 = MetaData(testing.db)
m2.reflect(views=False)
eq_(
set(m2.tables),
set(['users', 'email_addresses', 'dingalings'])
)
m2 = MetaData(testing.db)
m2.reflect(views=True)
eq_(
set(m2.tables),
set(['email_addresses_v', 'users_v',
'users', 'dingalings', 'email_addresses'])
)
finally:
_drop_views(metadata.bind)
class CreateDropTest(fixtures.TestBase):
__backend__ = True
@classmethod
def setup_class(cls):
global metadata, users
metadata = MetaData()
users = Table('users', metadata,
Column('user_id', sa.Integer,
sa.Sequence('user_id_seq', optional=True),
primary_key=True),
Column('user_name', sa.String(40)))
Table('email_addresses', metadata,
Column('address_id', sa.Integer,
sa.Sequence('address_id_seq', optional=True),
primary_key=True),
Column('user_id',
sa.Integer, sa.ForeignKey(users.c.user_id)),
Column('email_address', sa.String(40)))
Table(
'orders',
metadata,
Column('order_id', sa.Integer, sa.Sequence('order_id_seq',
optional=True), primary_key=True),
Column('user_id', sa.Integer,
sa.ForeignKey(users.c.user_id)),
Column('description', sa.String(50)),
Column('isopen', sa.Integer),
)
Table('items', metadata,
Column('item_id', sa.INT,
sa.Sequence('items_id_seq', optional=True),
primary_key=True),
Column('order_id',
sa.INT, sa.ForeignKey('orders')),
Column('item_name', sa.VARCHAR(50)))
def test_sorter(self):
tables = metadata.sorted_tables
table_names = [t.name for t in tables]
ua = [n for n in table_names if n in ('users', 'email_addresses')]
oi = [n for n in table_names if n in ('orders', 'items')]
eq_(ua, ['users', 'email_addresses'])
eq_(oi, ['orders', 'items'])
def test_checkfirst(self):
try:
assert not users.exists(testing.db)
users.create(bind=testing.db)
assert users.exists(testing.db)
users.create(bind=testing.db, checkfirst=True)
users.drop(bind=testing.db)
users.drop(bind=testing.db, checkfirst=True)
assert not users.exists(bind=testing.db)
users.create(bind=testing.db, checkfirst=True)
users.drop(bind=testing.db)
finally:
metadata.drop_all(bind=testing.db)
def test_createdrop(self):
metadata.create_all(bind=testing.db)
eq_(testing.db.has_table('items'), True)
eq_(testing.db.has_table('email_addresses'), True)
metadata.create_all(bind=testing.db)
eq_(testing.db.has_table('items'), True)
metadata.drop_all(bind=testing.db)
eq_(testing.db.has_table('items'), False)
eq_(testing.db.has_table('email_addresses'), False)
metadata.drop_all(bind=testing.db)
eq_(testing.db.has_table('items'), False)
def test_tablenames(self):
metadata.create_all(bind=testing.db)
# we only check to see if all the explicitly created tables are
# there, rather than assertEqual -- the test db could have
# "extra" tables if there is a misconfigured template. (*cough*
# tsearch2 w/ the pg windows installer.)
self.assert_(not set(metadata.tables)
- set(testing.db.table_names()))
metadata.drop_all(bind=testing.db)
class SchemaManipulationTest(fixtures.TestBase):
__backend__ = True
def test_append_constraint_unique(self):
meta = MetaData()
users = Table('users', meta, Column('id', sa.Integer))
addresses = Table('addresses', meta,
Column('id', sa.Integer),
Column('user_id', sa.Integer))
fk = sa.ForeignKeyConstraint(['user_id'], [users.c.id])
addresses.append_constraint(fk)
addresses.append_constraint(fk)
assert len(addresses.c.user_id.foreign_keys) == 1
assert addresses.constraints == set([addresses.primary_key, fk])
class UnicodeReflectionTest(fixtures.TestBase):
__backend__ = True
@classmethod
def setup_class(cls):
cls.metadata = metadata = MetaData()
no_multibyte_period = set([
('plain', 'col_plain', 'ix_plain')
])
no_has_table = [
(
'no_has_table_1',
ue('col_Unit\u00e9ble'),
ue('ix_Unit\u00e9ble')
),
(
'no_has_table_2',
ue('col_\u6e2c\u8a66'),
ue('ix_\u6e2c\u8a66')
),
]
no_case_sensitivity = [
(
ue('\u6e2c\u8a66'),
ue('col_\u6e2c\u8a66'),
ue('ix_\u6e2c\u8a66')
),
(
ue('unit\u00e9ble'),
ue('col_unit\u00e9ble'),
ue('ix_unit\u00e9ble')
),
]
full = [
(
ue('Unit\u00e9ble'),
ue('col_Unit\u00e9ble'),
ue('ix_Unit\u00e9ble')
),
(
ue('\u6e2c\u8a66'),
ue('col_\u6e2c\u8a66'),
ue('ix_\u6e2c\u8a66')
),
]
# as you can see, our options for this kind of thing
# are really limited unless you're on PG or SQLite
# forget about it on these backends
if not testing.requires.unicode_ddl.enabled:
names = no_multibyte_period
# mysql can't handle casing usually
elif testing.against("mysql") and \
not testing.requires.mysql_fully_case_sensitive.enabled:
names = no_multibyte_period.union(no_case_sensitivity)
# mssql + pyodbc + freetds can't compare multibyte names to
# information_schema.tables.table_name
elif testing.against("mssql"):
names = no_multibyte_period.union(no_has_table)
else:
names = no_multibyte_period.union(full)
for tname, cname, ixname in names:
t = Table(tname, metadata,
Column('id', sa.Integer,
sa.Sequence(cname + '_id_seq'),
primary_key=True),
Column(cname, Integer)
)
schema.Index(ixname, t.c[cname])
metadata.create_all(testing.db)
cls.names = names
@classmethod
def teardown_class(cls):
cls.metadata.drop_all(testing.db, checkfirst=False)
@testing.requires.unicode_connections
def test_has_table(self):
for tname, cname, ixname in self.names:
assert testing.db.has_table(tname), "Can't detect name %s" % tname
@testing.requires.unicode_connections
def test_basic(self):
# the 'convert_unicode' should not get in the way of the
# reflection process. reflecttable for oracle, postgresql
# (others?) expect non-unicode strings in result sets/bind
# params
bind = testing.db
names = set([rec[0] for rec in self.names])
reflected = set(bind.table_names())
# Jython 2.5 on Java 5 lacks unicodedata.normalize
if not names.issubset(reflected) and hasattr(unicodedata, 'normalize'):
# Python source files in the utf-8 coding seem to
# normalize literals as NFC (and the above are
# explicitly NFC). Maybe this database normalizes NFD
# on reflection.
nfc = set([unicodedata.normalize('NFC', n) for n in names])
self.assert_(nfc == names)
# Yep. But still ensure that bulk reflection and
# create/drop work with either normalization.
r = MetaData(bind)
r.reflect()
r.drop_all(checkfirst=False)
r.create_all(checkfirst=False)
@testing.requires.unicode_connections
def test_get_names(self):
inspector = inspect(testing.db)
names = dict(
(tname, (cname, ixname)) for tname, cname, ixname in self.names
)
for tname in inspector.get_table_names():
assert tname in names
eq_(
[
(rec['name'], rec['column_names'][0])
for rec in inspector.get_indexes(tname)
],
[(names[tname][1], names[tname][0])]
)
class SchemaTest(fixtures.TestBase):
__backend__ = True
@testing.requires.schemas
@testing.requires.cross_schema_fk_reflection
def test_has_schema(self):
eq_(testing.db.dialect.has_schema(testing.db,
testing.config.test_schema), True)
eq_(testing.db.dialect.has_schema(testing.db,
'sa_fake_schema_123'), False)
@testing.requires.schemas
@testing.fails_on('sqlite', 'FIXME: unknown')
@testing.fails_on('sybase', 'FIXME: unknown')
def test_explicit_default_schema(self):
engine = testing.db
engine.connect().close()
if testing.against('sqlite'):
# Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
# but fails on:
# FOREIGN KEY(col2) REFERENCES main.table1 (col1)
schema = 'main'
else:
schema = engine.dialect.default_schema_name
assert bool(schema)
metadata = MetaData(engine)
Table('table1', metadata,
Column('col1', sa.Integer, primary_key=True),
test_needs_fk=True,
schema=schema)
Table('table2', metadata,
Column('col1', sa.Integer, primary_key=True),
Column('col2', sa.Integer,
sa.ForeignKey('%s.table1.col1' % schema)),
test_needs_fk=True,
schema=schema)
try:
metadata.create_all()
metadata.create_all(checkfirst=True)
assert len(metadata.tables) == 2
metadata.clear()
Table('table1', metadata, autoload=True, schema=schema)
Table('table2', metadata, autoload=True, schema=schema)
assert len(metadata.tables) == 2
finally:
metadata.drop_all()
@testing.requires.schemas
@testing.fails_on('sybase', 'FIXME: unknown')
def test_explicit_default_schema_metadata(self):
engine = testing.db
if testing.against('sqlite'):
# Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
# but fails on:
# FOREIGN KEY(col2) REFERENCES main.table1 (col1)
schema = 'main'
else:
schema = engine.dialect.default_schema_name
assert bool(schema)
metadata = MetaData(engine, schema=schema)
Table('table1', metadata,
Column('col1', sa.Integer, primary_key=True),
test_needs_fk=True)
Table('table2', metadata,
Column('col1', sa.Integer, primary_key=True),
Column('col2', sa.Integer,
sa.ForeignKey('table1.col1')),
test_needs_fk=True)
try:
metadata.create_all()
metadata.create_all(checkfirst=True)
assert len(metadata.tables) == 2
metadata.clear()
Table('table1', metadata, autoload=True)
Table('table2', metadata, autoload=True)
assert len(metadata.tables) == 2
finally:
metadata.drop_all()
@testing.requires.schemas
@testing.provide_metadata
def test_metadata_reflect_schema(self):
metadata = self.metadata
createTables(metadata, testing.config.test_schema)
metadata.create_all()
m2 = MetaData(schema=testing.config.test_schema, bind=testing.db)
m2.reflect()
eq_(
set(m2.tables),
set([
'%s.dingalings' % testing.config.test_schema,
'%s.users' % testing.config.test_schema,
'%s.email_addresses' % testing.config.test_schema
])
)
@testing.requires.schemas
@testing.requires.cross_schema_fk_reflection
@testing.provide_metadata
def test_reflect_all_schemas_default_overlap(self):
t1 = Table('t', self.metadata,
Column('id', Integer, primary_key=True))
t2 = Table('t', self.metadata,
Column('id1', sa.ForeignKey('t.id')),
schema=testing.config.test_schema
)
self.metadata.create_all()
m2 = MetaData()
m2.reflect(testing.db, schema=testing.config.test_schema)
m3 = MetaData()
m3.reflect(testing.db)
m3.reflect(testing.db, schema=testing.config.test_schema)
eq_(
set((t.name, t.schema) for t in m2.tables.values()),
set((t.name, t.schema) for t in m3.tables.values())
)
# Tests related to engine.reflection
def createTables(meta, schema=None):
if schema:
schema_prefix = schema + "."
else:
schema_prefix = ""
users = Table('users', meta,
Column('user_id', sa.INT, primary_key=True),
Column('user_name', sa.VARCHAR(20), nullable=False),
Column('test1', sa.CHAR(5), nullable=False),
Column('test2', sa.Float(5), nullable=False),
Column('test3', sa.Text),
Column('test4', sa.Numeric(10, 2), nullable=False),
Column('test5', sa.Date),
Column('test5_1', sa.TIMESTAMP),
Column('parent_user_id', sa.Integer,
sa.ForeignKey('%susers.user_id' % schema_prefix)),
Column('test6', sa.Date, nullable=False),
Column('test7', sa.Text),
Column('test8', sa.LargeBinary),
Column('test_passivedefault2', sa.Integer, server_default='5'),
Column('test9', sa.LargeBinary(100)),
Column('test10', sa.Numeric(10, 2)),
schema=schema,
test_needs_fk=True,
)
dingalings = Table("dingalings", meta,
Column('dingaling_id', sa.Integer, primary_key=True),
Column('address_id', sa.Integer,
sa.ForeignKey(
'%semail_addresses.address_id' % schema_prefix)),
Column('data', sa.String(30)),
schema=schema, test_needs_fk=True,
)
addresses = Table('email_addresses', meta,
Column('address_id', sa.Integer),
Column('remote_user_id', sa.Integer,
sa.ForeignKey(users.c.user_id)),
Column('email_address', sa.String(20)),
sa.PrimaryKeyConstraint('address_id', name='email_ad_pk'),
schema=schema,
test_needs_fk=True,
)
return (users, addresses, dingalings)
def createIndexes(con, schema=None):
fullname = 'users'
if schema:
fullname = "%s.%s" % (schema, 'users')
query = "CREATE INDEX users_t_idx ON %s (test1, test2)" % fullname
con.execute(sa.sql.text(query))
@testing.requires.views
def _create_views(con, schema=None):
for table_name in ('users', 'email_addresses'):
fullname = table_name
if schema:
fullname = "%s.%s" % (schema, table_name)
view_name = fullname + '_v'
query = "CREATE VIEW %s AS SELECT * FROM %s" % (view_name, fullname)
con.execute(sa.sql.text(query))
@testing.requires.views
def _drop_views(con, schema=None):
for table_name in ('email_addresses', 'users'):
fullname = table_name
if schema:
fullname = "%s.%s" % (schema, table_name)
view_name = fullname + '_v'
query = "DROP VIEW %s" % view_name
con.execute(sa.sql.text(query))
class ReverseCasingReflectTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
__backend__ = True
@testing.requires.denormalized_names
def setup(self):
testing.db.execute("""
CREATE TABLE weird_casing(
col1 char(20),
"Col2" char(20),
"col3" char(20)
)
""")
@testing.requires.denormalized_names
def teardown(self):
testing.db.execute("drop table weird_casing")
@testing.requires.denormalized_names
def test_direct_quoting(self):
m = MetaData(testing.db)
t = Table('weird_casing', m, autoload=True)
self.assert_compile(t.select(),
'SELECT weird_casing.col1, '
'weird_casing."Col2", weird_casing."col3" '
'FROM weird_casing')
class CaseSensitiveTest(fixtures.TablesTest):
"""Nail down case sensitive behaviors, mostly on MySQL."""
__backend__ = True
@classmethod
def define_tables(cls, metadata):
Table('SomeTable', metadata,
Column('x', Integer, primary_key=True),
test_needs_fk=True
)
Table('SomeOtherTable', metadata,
Column('x', Integer, primary_key=True),
Column('y', Integer, sa.ForeignKey("SomeTable.x")),
test_needs_fk=True
)
@testing.fails_if(testing.requires._has_mysql_on_windows)
def test_table_names(self):
x = testing.db.run_callable(
testing.db.dialect.get_table_names
)
assert set(["SomeTable", "SomeOtherTable"]).issubset(x)
def test_reflect_exact_name(self):
m = MetaData()
t1 = Table("SomeTable", m, autoload=True, autoload_with=testing.db)
eq_(t1.name, "SomeTable")
assert t1.c.x is not None
@testing.fails_if(lambda:
testing.against(('mysql', '<', (5, 5))) and
not testing.requires._has_mysql_fully_case_sensitive()
)
def test_reflect_via_fk(self):
m = MetaData()
t2 = Table("SomeOtherTable", m, autoload=True,
autoload_with=testing.db)
eq_(t2.name, "SomeOtherTable")
assert "SomeTable" in m.tables
@testing.fails_if(testing.requires._has_mysql_fully_case_sensitive)
@testing.fails_on_everything_except('sqlite', 'mysql', 'mssql')
def test_reflect_case_insensitive(self):
m = MetaData()
t2 = Table("sOmEtAbLe", m, autoload=True, autoload_with=testing.db)
eq_(t2.name, "sOmEtAbLe")
class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
__backend__ = True
@classmethod
def setup_class(cls):
cls.metadata = MetaData()
cls.to_reflect = Table(
'to_reflect',
cls.metadata,
Column('x', sa.Integer, primary_key=True),
Column('y', sa.Integer),
test_needs_fk=True
)
cls.related = Table(
'related',
cls.metadata,
Column('q', sa.Integer, sa.ForeignKey('to_reflect.x')),
test_needs_fk=True
)
sa.Index("some_index", cls.to_reflect.c.y)
cls.metadata.create_all(testing.db)
@classmethod
def teardown_class(cls):
cls.metadata.drop_all(testing.db)
def _do_test(self, col, update, assert_, tablename="to_reflect"):
# load the actual Table class, not the test
# wrapper
from sqlalchemy.schema import Table
m = MetaData(testing.db)
def column_reflect(insp, table, column_info):
if column_info['name'] == col:
column_info.update(update)
t = Table(tablename, m, autoload=True, listeners=[
('column_reflect', column_reflect),
])
assert_(t)
m = MetaData(testing.db)
self.event_listen(Table, 'column_reflect', column_reflect)
t2 = Table(tablename, m, autoload=True)
assert_(t2)
def test_override_key(self):
def assertions(table):
eq_(table.c.YXZ.name, "x")
eq_(set(table.primary_key), set([table.c.YXZ]))
self._do_test(
"x", {"key": "YXZ"},
assertions
)
def test_override_index(self):
def assertions(table):
idx = list(table.indexes)[0]
eq_(idx.columns, [table.c.YXZ])
self._do_test(
"y", {"key": "YXZ"},
assertions
)
def test_override_key_fk(self):
m = MetaData(testing.db)
def column_reflect(insp, table, column_info):
if column_info['name'] == 'q':
column_info['key'] = 'qyz'
elif column_info['name'] == 'x':
column_info['key'] = 'xyz'
to_reflect = Table("to_reflect", m, autoload=True, listeners=[
('column_reflect', column_reflect),
])
related = Table("related", m, autoload=True, listeners=[
('column_reflect', column_reflect),
])
assert related.c.qyz.references(to_reflect.c.xyz)
def test_override_type(self):
def assert_(table):
assert isinstance(table.c.x.type, sa.String)
self._do_test(
"x", {"type": sa.String},
assert_
)
def test_override_info(self):
self._do_test(
"x", {"info": {"a": "b"}},
lambda table: eq_(table.c.x.info, {"a": "b"})
)
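# --- Illustrative sketch (added; not part of the original test module) ---
# The 'column_reflect' event exercised above can also be registered directly
# in application code, e.g. to rewrite the keys of reflected columns. The
# engine URL and table name below are hypothetical.
#
# from sqlalchemy import MetaData, Table, create_engine, event
#
# engine = create_engine('sqlite://')
# meta = MetaData()
#
# @event.listens_for(Table, 'column_reflect')
# def _uppercase_keys(inspector, table, column_info):
#     # runs once per reflected column; mutate column_info in place
#     column_info['key'] = column_info['name'].upper()
#
# reflected = Table('some_table', meta, autoload=True, autoload_with=engine)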
| mit | 8,664,625,925,115,034,000 | -3,850,615,054,205,796,400 | 34.680762 | 79 | 0.542949 | false |
datjwu/rbtools | rbtools/api/errors.py | 4 | 1553 | from __future__ import unicode_literals
import six
class APIError(Exception):
def __init__(self, http_status, error_code, rsp=None, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
self.http_status = http_status
self.error_code = error_code
self.rsp = rsp
def __str__(self):
code_str = 'HTTP %d' % self.http_status
if self.error_code:
code_str += ', API Error %d' % self.error_code
if self.rsp and 'err' in self.rsp:
return '%s (%s)' % (self.rsp['err']['msg'], code_str)
else:
return code_str
class AuthorizationError(APIError):
pass
class BadRequestError(APIError):
def __str__(self):
lines = [super(BadRequestError, self).__str__()]
if self.rsp and 'fields' in self.rsp:
lines.append('')
for field, error in six.iteritems(self.rsp['fields']):
lines.append(' %s: %s' % (field, '; '.join(error)))
return '\n'.join(lines)
class CacheError(Exception):
"""An exception for caching errors."""
class ServerInterfaceError(Exception):
def __init__(self, msg, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
self.msg = msg
def __str__(self):
return self.msg
API_ERROR_TYPE = {
400: BadRequestError,
401: AuthorizationError,
}
def create_api_error(http_status, *args, **kwargs):
error_type = API_ERROR_TYPE.get(http_status, APIError)
return error_type(http_status, *args, **kwargs)
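# --- Illustrative usage sketch (added; not part of the original module) ---
# The factory above picks the exception subclass from the HTTP status and
# falls back to the generic APIError. The 'rsp' payload is a hypothetical
# example shaped like a Review Board error response.
def _example_create_api_error():
    rsp = {'err': {'msg': 'You are not logged in', 'code': 103}}
    err = create_api_error(401, 103, rsp)
    assert isinstance(err, AuthorizationError)
    return str(err)  # 'You are not logged in (HTTP 401, API Error 103)'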
| mit | 4,279,500,291,655,533,600 | -4,249,282,386,596,790,000 | 23.650794 | 75 | 0.580167 | false |
shakamunyi/tensorflow | tensorflow/contrib/framework/python/ops/audio_ops.py | 80 | 1186 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-short-docstring-punctuation
"""Audio processing and decoding ops.
@@decode_wav
@@encode_wav
@@audio_spectrogram
@@mfcc
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_audio_ops import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
remove_undocumented(__name__, [])
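# --- Illustrative usage sketch (added; not part of the original module) ---
# A minimal, hedged example of chaining the wrapped ops into an MFCC
# pipeline; the exact signatures come from gen_audio_ops and can differ
# between TensorFlow releases, and the WAV path is hypothetical.
#
# import tensorflow as tf
# from tensorflow.contrib.framework.python.ops import audio_ops
#
# wav_bytes = tf.read_file('speech.wav')
# waveform, sample_rate = audio_ops.decode_wav(wav_bytes, desired_channels=1)
# spectrogram = audio_ops.audio_spectrogram(waveform,
#                                           window_size=1024, stride=512)
# mfccs = audio_ops.mfcc(spectrogram, sample_rate)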
| apache-2.0 | 4,865,835,080,649,656,000 | 9,019,342,158,047,919,000 | 31.944444 | 80 | 0.710793 | false |
wbond/subversion | tools/dist/release.py | 2 | 22908 | #!/usr/bin/env python
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# About this script:
# This script is intended to simplify creating Subversion releases, by
# automating as much as is possible. It works well with our Apache
# infrastructure, and should make rolling, posting, and announcing
# releases dirt simple.
#
# This script may be run on a number of platforms, but it is intended to
# be run on people.apache.org. As such, it may have dependencies (such
# as Python version) which may not be common, but are guaranteed to be
# available on people.apache.org.
# It'd be kind of nice to use the Subversion python bindings in this script,
# but people.apache.org doesn't currently have them installed
# Stuff we need
import os
import re
import sys
import glob
import shutil
import urllib2
import hashlib
import tarfile
import logging
import datetime
import operator
import itertools
import subprocess
import argparse # standard in Python 2.7
# Find ezt, using Subversion's copy, if there isn't one on the system.
try:
import ezt
except ImportError:
ezt_path = os.path.dirname(os.path.dirname(os.path.abspath(sys.path[0])))
ezt_path = os.path.join(ezt_path, 'build', 'generator')
sys.path.append(ezt_path)
import ezt
# Our required / recommended versions
autoconf_ver = '2.68'
libtool_ver = '2.4'
swig_ver = '2.0.4'
# Some constants
repos = 'http://svn.apache.org/repos/asf/subversion'
people_host = 'minotaur.apache.org'
people_dist_dir = '/www/www.apache.org/dist/subversion'
#----------------------------------------------------------------------
# Utility functions
class Version(object):
    regex = re.compile(r'(\d+)\.(\d+)\.(\d+)(?:-(?:(rc|alpha|beta)(\d+)))?')
def __init__(self, ver_str):
match = self.regex.search(ver_str)
if not match:
raise RuntimeError("Bad version string '%s'" % ver_str)
self.major = int(match.group(1))
self.minor = int(match.group(2))
self.patch = int(match.group(3))
if match.group(4):
self.pre = match.group(4)
self.pre_num = int(match.group(5))
else:
self.pre = None
self.pre_num = None
self.base = '%d.%d.%d' % (self.major, self.minor, self.patch)
def is_prerelease(self):
return self.pre != None
def __lt__(self, that):
if self.major < that.major: return True
if self.major > that.major: return False
if self.minor < that.minor: return True
if self.minor > that.minor: return False
if self.patch < that.patch: return True
if self.patch > that.patch: return False
if not self.pre and not that.pre: return False
if not self.pre and that.pre: return False
if self.pre and not that.pre: return True
# We are both pre-releases
if self.pre != that.pre:
return self.pre < that.pre
else:
return self.pre_num < that.pre_num
def __str(self):
if self.pre:
extra = '-%s%d' % (self.pre, self.pre_num)
else:
extra = ''
return self.base + extra
def __repr__(self):
return "Version('%s')" % self.__str()
def __str__(self):
return self.__str()
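# Illustrative sketch (added): how the Version helper above parses and orders
# release labels. The labels are hypothetical examples.
def _example_version_ordering():
    rc = Version('1.7.0-rc1')
    final = Version('1.7.0')
    assert rc.is_prerelease() and not final.is_prerelease()
    assert rc < final                # pre-releases sort before the release
    assert str(rc) == '1.7.0-rc1'    # round-trips through __str__
    return sorted([final, rc])       # [Version('1.7.0-rc1'), Version('1.7.0')]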
def get_prefix(base_dir):
return os.path.join(base_dir, 'prefix')
def get_tempdir(base_dir):
return os.path.join(base_dir, 'tempdir')
def get_deploydir(base_dir):
return os.path.join(base_dir, 'deploy')
def get_tmpldir():
return os.path.join(os.path.abspath(sys.path[0]), 'templates')
def get_tmplfile(filename):
try:
return open(os.path.join(get_tmpldir(), filename))
except IOError:
# Hmm, we had a problem with the local version, let's try the repo
return urllib2.urlopen(repos + '/trunk/tools/dist/templates/' + filename)
def get_nullfile():
# This is certainly not cross platform
return open('/dev/null', 'w')
def run_script(verbose, script):
if verbose:
stdout = None
stderr = None
else:
stdout = get_nullfile()
stderr = subprocess.STDOUT
for l in script.split('\n'):
subprocess.check_call(l.split(), stdout=stdout, stderr=stderr)
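# Illustrative sketch (added): run_script() treats its second argument as a
# small line-oriented script and runs each line as a separate subprocess,
# e.g. (prefix path hypothetical):
#
# run_script(True, '''./configure --prefix=/tmp/prefix
# make
# make install''')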
def download_file(url, target):
response = urllib2.urlopen(url)
target_file = open(target, 'w')
target_file.write(response.read())
def assert_people():
if os.uname()[1] != people_host:
raise RuntimeError('Not running on expected host "%s"' % people_host)
#----------------------------------------------------------------------
# Cleaning up the environment
def cleanup(args):
'Remove generated files and folders.'
logging.info('Cleaning')
shutil.rmtree(get_prefix(args.base_dir), True)
shutil.rmtree(get_tempdir(args.base_dir), True)
shutil.rmtree(get_deploydir(args.base_dir), True)
#----------------------------------------------------------------------
# Creating an environment to roll the release
class RollDep(object):
'The super class for each of the build dependencies.'
def __init__(self, base_dir, use_existing, verbose):
self._base_dir = base_dir
self._use_existing = use_existing
self._verbose = verbose
def _test_version(self, cmd):
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
(stdout, stderr) = proc.communicate()
rc = proc.wait()
if rc: return ''
return stdout.split('\n')
def build(self):
if not hasattr(self, '_extra_configure_flags'):
self._extra_configure_flags = ''
cwd = os.getcwd()
tempdir = get_tempdir(self._base_dir)
tarball = os.path.join(tempdir, self._filebase + '.tar.gz')
if os.path.exists(tarball):
if not self._use_existing:
                raise RuntimeError('%s tarball "%s" already exists'
                                   % (self.label, tarball))
logging.info('Using existing %s.tar.gz' % self._filebase)
else:
logging.info('Fetching %s' % self._filebase)
download_file(self._url, tarball)
# Extract tarball
tarfile.open(tarball).extractall(tempdir)
logging.info('Building ' + self.label)
os.chdir(os.path.join(tempdir, self._filebase))
run_script(self._verbose,
'''./configure --prefix=%s %s
make
make install''' % (get_prefix(self._base_dir),
self._extra_configure_flags))
os.chdir(cwd)
class AutoconfDep(RollDep):
def __init__(self, base_dir, use_existing, verbose):
RollDep.__init__(self, base_dir, use_existing, verbose)
self.label = 'autoconf'
self._filebase = 'autoconf-' + autoconf_ver
self._url = 'http://ftp.gnu.org/gnu/autoconf/%s.tar.gz' % self._filebase
def have_usable(self):
output = self._test_version(['autoconf', '-V'])
if not output: return False
version = output[0].split()[-1:][0]
return version == autoconf_ver
def use_system(self):
if not self._use_existing: return False
return self.have_usable()
class LibtoolDep(RollDep):
def __init__(self, base_dir, use_existing, verbose):
RollDep.__init__(self, base_dir, use_existing, verbose)
self.label = 'libtool'
self._filebase = 'libtool-' + libtool_ver
self._url = 'http://ftp.gnu.org/gnu/libtool/%s.tar.gz' % self._filebase
def have_usable(self):
output = self._test_version(['libtool', '--version'])
if not output: return False
version = output[0].split()[-1:][0]
return version == libtool_ver
def use_system(self):
# We unconditionally return False here, to avoid using a borked
# system libtool (I'm looking at you, Debian).
return False
class SwigDep(RollDep):
def __init__(self, base_dir, use_existing, verbose, sf_mirror):
RollDep.__init__(self, base_dir, use_existing, verbose)
self.label = 'swig'
self._filebase = 'swig-' + swig_ver
self._url = 'http://sourceforge.net/projects/swig/files/swig/%(swig)s/%(swig)s.tar.gz/download?use_mirror=%(sf_mirror)s' % \
{ 'swig' : self._filebase,
'sf_mirror' : sf_mirror }
self._extra_configure_flags = '--without-pcre'
def have_usable(self):
output = self._test_version(['swig', '-version'])
if not output: return False
version = output[1].split()[-1:][0]
return version == swig_ver
def use_system(self):
if not self._use_existing: return False
return self.have_usable()
def build_env(args):
'Download prerequisites for a release and prepare the environment.'
logging.info('Creating release environment')
try:
os.mkdir(get_prefix(args.base_dir))
os.mkdir(get_tempdir(args.base_dir))
except OSError:
if not args.use_existing:
raise
autoconf = AutoconfDep(args.base_dir, args.use_existing, args.verbose)
libtool = LibtoolDep(args.base_dir, args.use_existing, args.verbose)
swig = SwigDep(args.base_dir, args.use_existing, args.verbose,
args.sf_mirror)
# iterate over our rolling deps, and build them if needed
for dep in [autoconf, libtool, swig]:
if dep.use_system():
logging.info('Using system %s' % dep.label)
else:
dep.build()
#----------------------------------------------------------------------
# Create release artifacts
def roll_tarballs(args):
'Create the release artifacts.'
extns = ['zip', 'tar.gz', 'tar.bz2']
if args.branch:
branch = args.branch
else:
branch = args.version.base[:-1] + 'x'
logging.info('Rolling release %s from branch %s@%d' % (args.version,
branch, args.revnum))
# Ensure we've got the appropriate rolling dependencies available
autoconf = AutoconfDep(args.base_dir, False, args.verbose)
libtool = LibtoolDep(args.base_dir, False, args.verbose)
swig = SwigDep(args.base_dir, False, args.verbose, None)
for dep in [autoconf, libtool, swig]:
if not dep.have_usable():
raise RuntimeError('Cannot find usable %s' % dep.label)
# Make sure CHANGES is sync'd
if branch != 'trunk':
trunk_CHANGES = '%s/trunk/CHANGES@%d' % (repos, args.revnum)
branch_CHANGES = '%s/branches/%s/CHANGES@%d' % (repos, branch,
args.revnum)
proc = subprocess.Popen(['svn', 'diff', '--summarize', branch_CHANGES,
trunk_CHANGES],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
(stdout, stderr) = proc.communicate()
proc.wait()
if stdout:
raise RuntimeError('CHANGES not synced between trunk and branch')
# Create the output directory
if not os.path.exists(get_deploydir(args.base_dir)):
os.mkdir(get_deploydir(args.base_dir))
# For now, just delegate to dist.sh to create the actual artifacts
extra_args = ''
if args.version.is_prerelease():
extra_args = '-%s %d' % (args.version.pre, args.version.pre_num)
logging.info('Building UNIX tarballs')
run_script(args.verbose, '%s/dist.sh -v %s -pr %s -r %d %s'
% (sys.path[0], args.version.base, branch, args.revnum,
extra_args) )
    logging.info('Building Windows tarballs')
run_script(args.verbose, '%s/dist.sh -v %s -pr %s -r %d -zip %s'
% (sys.path[0], args.version.base, branch, args.revnum,
extra_args) )
# Move the results to the deploy directory
logging.info('Moving artifacts and calculating checksums')
for e in extns:
if args.version.pre == 'nightly':
filename = 'subversion-trunk.%s' % e
else:
filename = 'subversion-%s.%s' % (args.version, e)
shutil.move(filename, get_deploydir(args.base_dir))
filename = os.path.join(get_deploydir(args.base_dir), filename)
m = hashlib.sha1()
m.update(open(filename, 'r').read())
open(filename + '.sha1', 'w').write(m.hexdigest())
shutil.move('svn_version.h.dist', get_deploydir(args.base_dir))
# And we're done!
#----------------------------------------------------------------------
# Post the candidate release artifacts
def post_candidates(args):
    'Post the generated tarballs to a web-accessible directory.'
if args.target:
target = args.target
else:
target = os.path.join(os.getenv('HOME'), 'public_html', 'svn',
args.version)
if args.code_name:
dirname = args.code_name
else:
dirname = 'deploy'
if not os.path.exists(target):
os.makedirs(target)
data = { 'version' : args.version,
'revnum' : args.revnum,
'dirname' : dirname,
}
# Choose the right template text
if args.version.is_prerelease():
if args.version.pre == 'nightly':
template_filename = 'nightly-candidates.ezt'
else:
template_filename = 'rc-candidates.ezt'
else:
template_filename = 'stable-candidates.ezt'
template = ezt.Template()
template.parse(get_tmplfile(template_filename).read())
template.generate(open(os.path.join(target, 'index.html'), 'w'), data)
logging.info('Moving tarballs to %s' % os.path.join(target, dirname))
if os.path.exists(os.path.join(target, dirname)):
shutil.rmtree(os.path.join(target, dirname))
shutil.copytree(get_deploydir(args.base_dir), os.path.join(target, dirname))
#----------------------------------------------------------------------
# Clean dist
def clean_dist(args):
'Clean the distribution directory of all but the most recent artifacts.'
    regex = re.compile(r'subversion-(\d+)\.(\d+)\.(\d+)(?:-(?:(rc|alpha|beta)(\d+)))?')
if not args.dist_dir:
assert_people()
args.dist_dir = people_dist_dir
logging.info('Cleaning dist dir \'%s\'' % args.dist_dir)
filenames = glob.glob(os.path.join(args.dist_dir, 'subversion-*.tar.gz'))
versions = []
for filename in filenames:
versions.append(Version(filename))
for k, g in itertools.groupby(sorted(versions),
lambda x: (x.major, x.minor)):
releases = list(g)
logging.info("Saving release '%s'", releases[-1])
for r in releases[:-1]:
for filename in glob.glob(os.path.join(args.dist_dir,
'subversion-%s.*' % r)):
logging.info("Removing '%s'" % filename)
os.remove(filename)
#----------------------------------------------------------------------
# Write announcements
def write_news(args):
'Write text for the Subversion website.'
data = { 'date' : datetime.date.today().strftime('%Y%m%d'),
'date_pres' : datetime.date.today().strftime('%Y-%m-%d'),
'version' : str(args.version),
'version_base' : args.version.base,
}
if args.version.is_prerelease():
template_filename = 'rc-news.ezt'
else:
template_filename = 'stable-news.ezt'
template = ezt.Template()
template.parse(get_tmplfile(template_filename).read())
template.generate(sys.stdout, data)
def get_sha1info(args):
'Return a list of sha1 info for the release'
sha1s = glob.glob(os.path.join(get_deploydir(args.base_dir), '*.sha1'))
class info(object):
pass
sha1info = []
for s in sha1s:
i = info()
i.filename = os.path.basename(s)[:-5]
i.sha1 = open(s, 'r').read()
sha1info.append(i)
return sha1info
def write_announcement(args):
'Write the release announcement.'
sha1info = get_sha1info(args)
data = { 'version' : args.version,
'sha1info' : sha1info,
'siginfo' : open('getsigs-output', 'r').read(),
'major-minor' : args.version.base[:3],
'major-minor-patch' : args.version.base,
}
if args.version.is_prerelease():
template_filename = 'rc-release-ann.ezt'
else:
template_filename = 'stable-release-ann.ezt'
template = ezt.Template(compress_whitespace = False)
template.parse(get_tmplfile(template_filename).read())
template.generate(sys.stdout, data)
#----------------------------------------------------------------------
# Main entry point for argument parsing and handling
def main():
'Parse arguments, and drive the appropriate subcommand.'
# Setup our main parser
parser = argparse.ArgumentParser(
description='Create an Apache Subversion release.')
parser.add_argument('--clean', action='store_true', default=False,
help='Remove any directories previously created by %(prog)s')
parser.add_argument('--verbose', action='store_true', default=False,
help='Increase output verbosity')
parser.add_argument('--base-dir', default=os.getcwd(),
help='''The directory in which to create needed files and
folders. The default is the current working
directory.''')
subparsers = parser.add_subparsers(title='subcommands')
# Setup the parser for the build-env subcommand
subparser = subparsers.add_parser('build-env',
        help='''Download release prerequisites, including autoconf,
libtool, and swig.''')
subparser.set_defaults(func=build_env)
subparser.add_argument('--sf-mirror', default='softlayer',
help='''The mirror to use for downloading files from
SourceForge. If in the EU, you may want to use
'kent' for this value.''')
subparser.add_argument('--use-existing', action='store_true', default=False,
help='''Attempt to use existing build dependencies before
downloading and building a private set.''')
# Setup the parser for the roll subcommand
subparser = subparsers.add_parser('roll',
help='''Create the release artifacts.''')
subparser.set_defaults(func=roll_tarballs)
subparser.add_argument('version', type=Version,
help='''The release label, such as '1.7.0-alpha1'.''')
subparser.add_argument('revnum', type=int,
help='''The revision number to base the release on.''')
subparser.add_argument('--branch',
help='''The branch to base the release on.''')
# Setup the parser for the post-candidates subcommand
subparser = subparsers.add_parser('post-candidates',
help='''Build the website to host the candidate tarballs.
The default location is somewhere in ~/public_html.
''')
subparser.set_defaults(func=post_candidates)
subparser.add_argument('version', type=Version,
help='''The release label, such as '1.7.0-alpha1'.''')
subparser.add_argument('revnum', type=int,
help='''The revision number to base the release on.''')
subparser.add_argument('--target',
help='''The full path to the destination.''')
subparser.add_argument('--code-name',
help='''A whimsical name for the release, used only for
naming the download directory.''')
# The clean-dist subcommand
subparser = subparsers.add_parser('clean-dist',
help='''Clean the distribution directory (and mirrors) of
all but the most recent MAJOR.MINOR release. If no
dist-dir is given, this command will assume it is
running on people.apache.org.''')
subparser.set_defaults(func=clean_dist)
subparser.add_argument('--dist-dir',
help='''The directory to clean.''')
# The write-news subcommand
subparser = subparsers.add_parser('write-news',
help='''Output to stdout template text for use in the news
section of the Subversion website.''')
subparser.set_defaults(func=write_news)
subparser.add_argument('version', type=Version,
help='''The release label, such as '1.7.0-alpha1'.''')
subparser = subparsers.add_parser('write-announcement',
help='''Output to stdout template text for the emailed
release announcement.''')
subparser.set_defaults(func=write_announcement)
subparser.add_argument('version', type=Version,
help='''The release label, such as '1.7.0-alpha1'.''')
# A meta-target
subparser = subparsers.add_parser('clean',
help='''The same as the '--clean' switch, but as a
separate subcommand.''')
subparser.set_defaults(func=cleanup)
# Parse the arguments
args = parser.parse_args()
# first, process any global operations
if args.clean:
cleanup(args)
# Set up logging
logger = logging.getLogger()
if args.verbose:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
# Fix up our path so we can use our installed versions
os.environ['PATH'] = os.path.join(get_prefix(args.base_dir), 'bin') + ':' \
+ os.environ['PATH']
# finally, run the subcommand, and give it the parsed arguments
args.func(args)
if __name__ == '__main__':
main()
| apache-2.0 | -5,585,293,016,304,549,000 | 3,157,997,820,508,256,000 | 34.297381 | 132 | 0.581587 | false |
rouault/Quantum-GIS | tests/src/python/test_qgsserver_wms_getlegendgraphic.py | 4 | 18330 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer WMS.
From build dir, run: ctest -R PyQgsServerWMS -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Alessandro Pasotti'
__date__ = '25/05/2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
# Needed on Qt 5 so that the serialization of XML is consistent among all executions
os.environ['QT_HASH_SEED'] = '1'
import re
import urllib.request
import urllib.parse
import urllib.error
from qgis.testing import unittest
from qgis.PyQt.QtCore import QSize
import osgeo.gdal # NOQA
from test_qgsserver import QgsServerTestBase
from qgis.core import QgsProject
# Strip path and content length because path may vary
RE_STRIP_UNCHECKABLE = b'MAP=[^"]+|Content-Length: \d+'
RE_ATTRIBUTES = b'[^>\s]+=[^>\s]+'
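# Illustrative sketch (added): the patterns above are meant for re.sub() to
# blank out parts of a response that legitimately vary between runs before
# comparing against a reference; the raw bytes below are a hypothetical
# example.
#
# raw = b'Content-Length: 1234\nMAP=/tmp/project.qgs&SERVICE=WMS'
# comparable = re.sub(RE_STRIP_UNCHECKABLE, b'', raw)
# assert b'Content-Length: 1234' not in comparable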
class TestQgsServerWMSGetLegendGraphic(QgsServerTestBase):
"""QGIS Server WMS Tests for GetLegendGraphic request"""
def test_getLegendGraphics(self):
"""Test that does not return an exception but an image"""
parms = {
'MAP': self.testdata_path + "test_project.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
# 'WIDTH': '20', # optional
# 'HEIGHT': '20', # optional
'LAYER': 'testlayer%20èé',
}
qs = '?' + '&'.join(["%s=%s" % (k, v) for k, v in parms.items()])
h, r = self._execute_request(qs)
self.assertEqual(-1, h.find(b'Content-Type: text/xml; charset=utf-8'), "Header: %s\nResponse:\n%s" % (h, r))
self.assertNotEqual(-1, h.find(b'Content-Type: image/png'), "Header: %s\nResponse:\n%s" % (h, r))
def test_getLegendGraphics_invalid_parameters(self):
"""Test that does return an exception"""
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello,db_point",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"RULE": "1",
"BBOX": "-151.7,-38.9,51.0,78.0",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
err = b"BBOX parameter cannot be combined with RULE" in r
self.assertTrue(err)
def test_wms_GetLegendGraphic_LayerSpace(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"FORMAT": "image/png",
# "HEIGHT": "500",
# "WIDTH": "500",
"LAYERSPACE": "50.0",
"LAYERFONTBOLD": "TRUE",
"LAYERFONTSIZE": "30",
"ITEMFONTBOLD": "TRUE",
"ITEMFONTSIZE": "20",
"LAYERFONTFAMILY": self.fontFamily,
"ITEMFONTFAMILY": self.fontFamily,
"LAYERTITLE": "TRUE",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_LayerSpace")
def test_wms_GetLegendGraphic_ShowFeatureCount(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"FORMAT": "image/png",
# "HEIGHT": "500",
# "WIDTH": "500",
"LAYERTITLE": "TRUE",
"LAYERFONTBOLD": "TRUE",
"LAYERFONTSIZE": "30",
"LAYERFONTFAMILY": self.fontFamily,
"ITEMFONTFAMILY": self.fontFamily,
"ITEMFONTBOLD": "TRUE",
"ITEMFONTSIZE": "20",
"SHOWFEATURECOUNT": "TRUE",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ShowFeatureCount", max_size_diff=QSize(1, 1))
def test_getLegendGraphics_layertitle(self):
"""Test that does not return an exception but an image"""
print("TEST FONT FAMILY: ", self.fontFamily)
parms = {
'MAP': self.testdata_path + "test_project.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
# 'WIDTH': '20', # optional
# 'HEIGHT': '20', # optional
'LAYER': u'testlayer%20èé',
'LAYERFONTBOLD': 'TRUE',
'LAYERFONTSIZE': '30',
'ITEMFONTBOLD': 'TRUE',
'LAYERFONTFAMILY': self.fontFamily,
'ITEMFONTFAMILY': self.fontFamily,
'ITEMFONTSIZE': '20',
'LAYERTITLE': 'TRUE',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_test", 250, QSize(15, 15))
parms = {
'MAP': self.testdata_path + "test_project.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
# 'WIDTH': '20', # optional
# 'HEIGHT': '20', # optional
'LAYER': u'testlayer%20èé',
'LAYERTITLE': 'FALSE',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_test_layertitle_false", 250, QSize(15, 15))
def test_getLegendGraphics_rulelabel(self):
"""Test that does not return an exception but an image"""
parms = {
'MAP': self.testdata_path + "test_project.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
'LAYER': u'testlayer%20èé',
'LAYERFONTBOLD': 'TRUE',
'LAYERFONTSIZE': '30',
'LAYERFONTFAMILY': self.fontFamily,
'ITEMFONTFAMILY': self.fontFamily,
'ITEMFONTBOLD': 'TRUE',
'ITEMFONTSIZE': '20',
'RULELABEL': 'TRUE',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_test", 250, QSize(15, 15))
parms = {
'MAP': self.testdata_path + "test_project.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
'LAYER': u'testlayer%20èé',
'LAYERFONTBOLD': 'TRUE',
'LAYERFONTSIZE': '30',
'ITEMFONTBOLD': 'TRUE',
'ITEMFONTSIZE': '20',
'LAYERFONTFAMILY': self.fontFamily,
'ITEMFONTFAMILY': self.fontFamily,
'RULELABEL': 'FALSE',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_rulelabel_false", 250, QSize(15, 15))
def test_getLegendGraphics_rule(self):
"""Test that does not return an exception but an image"""
parms = {
'MAP': self.testdata_path + "test_project_legend_rule.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
'LAYER': u'testlayer%20èé',
'WIDTH': '20',
'HEIGHT': '20',
'RULE': 'rule0',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_rule0", 250, QSize(15, 15))
parms = {
'MAP': self.testdata_path + "test_project_legend_rule.qgs",
'SERVICE': 'WMS',
'VERSION': '1.3.0',
'REQUEST': 'GetLegendGraphic',
'FORMAT': 'image/png',
'LAYER': u'testlayer%20èé',
'WIDTH': '20',
'HEIGHT': '20',
'RULE': 'rule1',
}
qs = '?' + '&'.join([u"%s=%s" % (k, v) for k, v in parms.items()])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_rule1", 250, QSize(15, 15))
def test_wms_GetLegendGraphic_Basic(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_Basic")
def test_wms_GetLegendGraphic_Transparent(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857",
"TRANSPARENT": "TRUE"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_Transparent")
def test_wms_GetLegendGraphic_Background(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857",
"BGCOLOR": "green"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_Background")
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857",
"BGCOLOR": "0x008000"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_Background_Hex")
def test_wms_GetLegendGraphic_BoxSpace(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"BOXSPACE": "100",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_BoxSpace")
def test_wms_GetLegendGraphic_SymbolSpace(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"SYMBOLSPACE": "100",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_SymbolSpace")
def test_wms_GetLegendGraphic_IconLabelSpace(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"ICONLABELSPACE": "100",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_IconLabelSpace")
def test_wms_GetLegendGraphic_SymbolSize(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "FALSE",
"SYMBOLWIDTH": "50",
"SYMBOLHEIGHT": "30",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_SymbolSize")
def test_wms_GetLegendGraphic_LayerFont(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "TRUE",
"LAYERFONTBOLD": "TRUE",
"LAYERFONTITALIC": "TRUE",
"LAYERFONTSIZE": "30",
"ITEMFONTBOLD": "TRUE",
"ITEMFONTSIZE": "20",
"LAYERFONTFAMILY": self.fontFamily,
"ITEMFONTFAMILY": self.fontFamily,
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_LayerFont", max_size_diff=QSize(1, 1))
def test_wms_GetLegendGraphic_ItemFont(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello",
"LAYERTITLE": "TRUE",
"LAYERFONTBOLD": "TRUE",
"LAYERFONTSIZE": "30",
"ITEMFONTBOLD": "TRUE",
"ITEMFONTITALIC": "TRUE",
"ITEMFONTSIZE": "20",
"LAYERFONTFAMILY": self.fontFamily,
"ITEMFONTFAMILY": self.fontFamily,
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"CRS": "EPSG:3857"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_ItemFont", max_size_diff=QSize(1, 1))
def test_wms_GetLegendGraphic_BBox(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello,db_point",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"BBOX": "-151.7,-38.9,51.0,78.0",
"CRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox")
def test_wms_GetLegendGraphic_BBox2(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "Country,Hello,db_point",
"LAYERTITLE": "FALSE",
"FORMAT": "image/png",
"HEIGHT": "500",
"WIDTH": "500",
"BBOX": "-76.08,-6.4,-19.38,38.04",
"SRS": "EPSG:4326"
}.items())])
r, h = self._result(self._execute_request(qs))
self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox2")
def test_wms_GetLegendGraphic_EmptyLegend(self):
qs = "?" + "&".join(["%s=%s" % i for i in list({
"MAP": self.testdata_path + 'test_project_contextual_legend.qgs',
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYER": "QGIS%20Server%20Hello%20World",
"FORMAT": "image/png",
"HEIGHT": "840",
"WIDTH": "1226",
"BBOX": "10.38450,-49.6370,73.8183,42.9461",
"SRS": "EPSG:4326",
"SCALE": "15466642"
}.items())])
h, r = self._execute_request(qs)
self.assertEqual(-1, h.find(b'Content-Type: text/xml; charset=utf-8'), "Header: %s\nResponse:\n%s" % (h, r))
self.assertNotEqual(-1, h.find(b'Content-Type: image/png'), "Header: %s\nResponse:\n%s" % (h, r))
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 1,800,391,665,494,420,700 | 886,194,726,987,784,200 | 36.15213 | 116 | 0.498853 | false |
stephane-martin/salt-debian-packaging | salt-2016.3.3/salt/modules/rvm.py | 2 | 11310 | # -*- coding: utf-8 -*-
'''
Manage ruby installations and gemsets with RVM, the Ruby Version Manager.
'''
from __future__ import absolute_import
# Import python libs
import re
import os
import logging
# Import salt libs
import salt.utils
from salt.exceptions import CommandExecutionError
log = logging.getLogger(__name__)
# Don't shadow built-in's.
__func_alias__ = {
'list_': 'list'
}
__opts__ = {
'rvm.runas': None,
}
def _get_rvm_location(runas=None):
if runas:
runas_home = os.path.expanduser('~{0}'.format(runas))
rvmpath = '{0}/.rvm/bin/rvm'.format(runas_home)
if os.path.exists(rvmpath):
return [rvmpath]
return ['/usr/local/rvm/bin/rvm']
def _rvm(command, runas=None, cwd=None):
if runas is None:
runas = __salt__['config.option']('rvm.runas')
if not is_installed(runas):
return False
cmd = _get_rvm_location(runas) + command
ret = __salt__['cmd.run_all'](cmd,
runas=runas,
cwd=cwd,
python_shell=False)
if ret['retcode'] == 0:
return ret['stdout']
return False
def _rvm_do(ruby, command, runas=None, cwd=None):
return _rvm([ruby or 'default', 'do'] + command, runas=runas, cwd=cwd)
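# Illustrative sketch (added): _rvm() and _rvm_do() only assemble an argv
# list around the rvm binary before delegating to cmd.run_all. For example
# (user and ruby version are hypothetical):
#
# _rvm(['install', 'ruby-2.1.0'], runas='deploy')
# # runs: ~deploy/.rvm/bin/rvm install ruby-2.1.0 as user 'deploy'
# _rvm_do('ruby-2.1.0', ['gem', 'list'], runas='deploy')
# # runs: ~deploy/.rvm/bin/rvm ruby-2.1.0 do gem list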
def is_installed(runas=None):
'''
Check if RVM is installed.
CLI Example:
.. code-block:: bash
salt '*' rvm.is_installed
'''
try:
return __salt__['cmd.has_exec'](_get_rvm_location(runas)[0])
except IndexError:
return False
def install(runas=None):
'''
Install RVM system-wide
runas
The user under which to run the rvm installer script. If not specified,
then it be run as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.install
'''
# RVM dependencies on Ubuntu 10.04:
# bash coreutils gzip bzip2 gawk sed curl git-core subversion
installer = 'https://raw.githubusercontent.com/rvm/rvm/master/binscripts/rvm-installer'
ret = __salt__['cmd.run_all'](
# the RVM installer automatically does a multi-user install when it is
# invoked with root privileges
'curl -Ls {installer} | bash -s stable'.format(installer=installer),
runas=runas,
python_shell=True
)
if ret['retcode'] > 0:
msg = 'Error encountered while downloading the RVM installer'
if ret['stderr']:
msg += '. stderr follows:\n\n' + ret['stderr']
raise CommandExecutionError(msg)
return True
def install_ruby(ruby, runas=None):
'''
Install a ruby implementation.
ruby
The version of ruby to install
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.install_ruby 1.9.3-p385
'''
# MRI/RBX/REE dependencies for Ubuntu 10.04:
# build-essential openssl libreadline6 libreadline6-dev curl
# git-core zlib1g zlib1g-dev libssl-dev libyaml-dev libsqlite3-0
# libsqlite3-dev sqlite3 libxml2-dev libxslt1-dev autoconf libc6-dev
# libncurses5-dev automake libtool bison subversion ruby
if runas and runas != 'root':
_rvm(['autolibs', 'disable', ruby], runas=runas)
return _rvm(['install', '--disable-binary', ruby], runas=runas)
else:
return _rvm(['install', ruby], runas=runas)
def reinstall_ruby(ruby, runas=None):
'''
Reinstall a ruby implementation
ruby
The version of ruby to reinstall
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.reinstall_ruby 1.9.3-p385
'''
return _rvm(['reinstall', ruby], runas=runas)
def list_(runas=None):
'''
List all rvm-installed rubies
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.list
'''
rubies = []
output = _rvm(['list'], runas=runas)
if output:
regex = re.compile(r'^[= ]([*> ]) ([^- ]+)-([^ ]+) \[ (.*) \]')
for line in output.splitlines():
match = regex.match(line)
if match:
rubies.append([
match.group(2), match.group(3), match.group(1) == '*'
])
return rubies
def set_default(ruby, runas=None):
'''
Set the default ruby
ruby
The version of ruby to make the default
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.set_default 2.0.0
'''
return _rvm(['alias', 'create', 'default', ruby], runas=runas)
def get(version='stable', runas=None):
'''
Update RVM
version : stable
        Which version of RVM to install (e.g. stable or head)
CLI Example:
.. code-block:: bash
salt '*' rvm.get
'''
return _rvm(['get', version], runas=runas)
def wrapper(ruby_string, wrapper_prefix, runas=None, *binaries):
'''
Install RVM wrapper scripts
ruby_string
Ruby/gemset to install wrappers for
wrapper_prefix
What to prepend to the name of the generated wrapper binaries
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
binaries : None
The names of the binaries to create wrappers for. When nothing is
given, wrappers for ruby, gem, rake, irb, rdoc, ri and testrb are
generated.
CLI Example:
.. code-block:: bash
salt '*' rvm.wrapper <ruby_string> <wrapper_prefix>
'''
cmd = ['wrapper', ruby_string, wrapper_prefix]
cmd.extend(binaries)
return _rvm(cmd, runas=runas)
def rubygems(ruby, version, runas=None):
'''
Installs a specific rubygems version in the given ruby
ruby
The ruby for which to install rubygems
version
The version of rubygems to install, or 'remove' to use the version that
ships with 1.9
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.rubygems 2.0.0 1.8.24
'''
return _rvm_do(ruby, ['rubygems', version], runas=runas)
def gemset_create(ruby, gemset, runas=None):
'''
Creates a gemset.
ruby
The ruby version for which to create the gemset
gemset
The name of the gemset to create
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.gemset_create 2.0.0 foobar
'''
return _rvm_do(ruby, ['rvm', 'gemset', 'create', gemset], runas=runas)
def gemset_list(ruby='default', runas=None):
'''
List all gemsets for the given ruby.
ruby : default
The ruby version for which to list the gemsets
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.gemset_list
'''
gemsets = []
output = _rvm_do(ruby, ['rvm', 'gemset', 'list'], runas=runas)
if output:
regex = re.compile('^ ([^ ]+)')
for line in output.splitlines():
match = regex.match(line)
if match:
gemsets.append(match.group(1))
return gemsets
def gemset_delete(ruby, gemset, runas=None):
'''
Delete a gemset
ruby
The ruby version to which the gemset belongs
gemset
The gemset to delete
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.gemset_delete 2.0.0 foobar
'''
return _rvm_do(ruby,
['rvm', '--force', 'gemset', 'delete', gemset],
runas=runas)
def gemset_empty(ruby, gemset, runas=None):
'''
Remove all gems from a gemset.
ruby
The ruby version to which the gemset belongs
gemset
The gemset to empty
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.gemset_empty 2.0.0 foobar
'''
return _rvm_do(ruby,
['rvm', '--force', 'gemset', 'empty', gemset],
runas=runas)
def gemset_copy(source, destination, runas=None):
'''
Copy all gems from one gemset to another.
source
The name of the gemset to copy, complete with ruby version
destination
The destination gemset
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.gemset_copy foobar bazquo
'''
return _rvm(['gemset', 'copy', source, destination], runas=runas)
def gemset_list_all(runas=None):
'''
List all gemsets for all installed rubies.
Note that you must have set a default ruby before this can work.
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
CLI Example:
.. code-block:: bash
salt '*' rvm.gemset_list_all
'''
gemsets = {}
current_ruby = None
output = _rvm_do('default', ['rvm', 'gemset', 'list_all'], runas=runas)
if output:
gems_regex = re.compile('^ ([^ ]+)')
gemset_regex = re.compile('^gemsets for ([^ ]+)')
for line in output.splitlines():
match = gemset_regex.match(line)
if match:
current_ruby = match.group(1)
gemsets[current_ruby] = []
match = gems_regex.match(line)
if match:
gemsets[current_ruby].append(match.group(1))
return gemsets
def do(ruby, command, runas=None, cwd=None): # pylint: disable=C0103
'''
Execute a command in an RVM controlled environment.
ruby
Which ruby to use
command
The rvm command to execute
runas
The user under which to run rvm. If not specified, then rvm will be run
as the user under which Salt is running.
cwd
The directory from which to run the rvm command. Defaults to the user's
home directory.
CLI Example:
.. code-block:: bash
salt '*' rvm.do 2.0.0 <command>
'''
try:
command = salt.utils.shlex_split(command)
except AttributeError:
command = salt.utils.shlex_split(str(command))
return _rvm_do(ruby, command, runas=runas, cwd=cwd)
| apache-2.0 | -2,106,102,134,708,800,800 | -5,000,874,546,302,150,000 | 23.694323 | 91 | 0.594518 | false |
dwalton76/ev3dev-lang-python | tests/motor/motor_param_unittest.py | 1 | 24419 | #!/usr/bin/env python
# Based on the parameterized test case technique described here:
#
# http://eli.thegreenplace.net/2011/08/02/python-unit-testing-parametrized-test-cases
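# Illustrative sketch (added): every test class below pulls its fixtures from
# self._param, so a runner is expected to build one suite per attached motor,
# roughly as follows. The parametrize() helper name and the param keys are
# assumptions based on the technique linked above, not a verbatim copy of
# this repository's runner.
#
# suite = unittest.TestSuite()
# params = {'motor': ev3.LargeMotor('outA'), 'port': 'outA',
#           'driver_name': 'lego-ev3-l-motor', 'commands': [...],
#           'hold_pid': None}
# suite.addTest(ptc.ParameterizedTestCase.parametrize(
#     TestTachoMotorAddressValue, param=params))
# unittest.TextTestRunner().run(suite)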
import unittest
import time
import sys
import ev3dev.ev3 as ev3
import parameterizedtestcase as ptc
from motor_info import motor_info
class TestTachoMotorAddressValue(ptc.ParameterizedTestCase):
def test_address_value(self):
self.assertEqual(self._param['motor'].address, self._param['port'])
def test_address_value_is_read_only(self):
with self.assertRaises(AttributeError):
self._param['motor'].address = "ThisShouldNotWork"
class TestTachoMotorCommandsValue(ptc.ParameterizedTestCase):
def test_commands_value(self):
self.assertTrue(self._param['motor'].commands == self._param['commands'])
def test_commands_value_is_read_only(self):
with self.assertRaises(AttributeError):
self._param['motor'].commands = "ThisShouldNotWork"
class TestTachoMotorCountPerRotValue(ptc.ParameterizedTestCase):
def test_count_per_rot_value(self):
self.assertEqual(self._param['motor'].count_per_rot,
motor_info[self._param['motor'].driver_name]['count_per_rot'])
def test_count_per_rot_value_is_read_only(self):
with self.assertRaises(AttributeError):
self._param['motor'].count_per_rot = "ThisShouldNotWork"
class TestTachoMotorCountPerMValue(ptc.ParameterizedTestCase):
def test_count_per_m_value(self):
self.assertEqual(self._param['motor'].count_per_m, motor_info[self._param['motor'].driver_name]['count_per_m'])
def test_count_per_m_value_is_read_only(self):
with self.assertRaises(AttributeError):
self._param['motor'].count_per_m = "ThisShouldNotWork"
class TestTachoMotorFullTravelCountValue(ptc.ParameterizedTestCase):
def test_full_travel_count_value(self):
self.assertEqual(self._param['motor'].full_travel_count,
motor_info[self._param['motor'].driver_name]['full_travel_count'])
def test_full_travel_count_value_is_read_only(self):
with self.assertRaises(AttributeError):
            self._param['motor'].full_travel_count = "ThisShouldNotWork"
class TestTachoMotorDriverNameValue(ptc.ParameterizedTestCase):
def test_driver_name_value(self):
self.assertEqual(self._param['motor'].driver_name, self._param['driver_name'])
def test_driver_name_value_is_read_only(self):
with self.assertRaises(AttributeError):
self._param['motor'].driver_name = "ThisShouldNotWork"
class TestTachoMotorDutyCycleValue(ptc.ParameterizedTestCase):
def test_duty_cycle_value_is_read_only(self):
with self.assertRaises(AttributeError):
self._param['motor'].duty_cycle = "ThisShouldNotWork"
def test_duty_cycle_value_after_reset(self):
self._param['motor'].command = 'reset'
self.assertEqual(self._param['motor'].duty_cycle, 0)
class TestTachoMotorDutyCycleSpValue(ptc.ParameterizedTestCase):
def test_duty_cycle_sp_large_negative(self):
with self.assertRaises(IOError):
self._param['motor'].duty_cycle_sp = -101
def test_duty_cycle_sp_max_negative(self):
self._param['motor'].duty_cycle_sp = -100
self.assertEqual(self._param['motor'].duty_cycle_sp, -100)
def test_duty_cycle_sp_min_negative(self):
self._param['motor'].duty_cycle_sp = -1
self.assertEqual(self._param['motor'].duty_cycle_sp, -1)
def test_duty_cycle_sp_zero(self):
self._param['motor'].duty_cycle_sp = 0
self.assertEqual(self._param['motor'].duty_cycle_sp, 0)
def test_duty_cycle_sp_min_positive(self):
self._param['motor'].duty_cycle_sp = 1
self.assertEqual(self._param['motor'].duty_cycle_sp, 1)
def test_duty_cycle_sp_max_positive(self):
self._param['motor'].duty_cycle_sp = 100
self.assertEqual(self._param['motor'].duty_cycle_sp, 100)
def test_duty_cycle_sp_large_positive(self):
with self.assertRaises(IOError):
self._param['motor'].duty_cycle_sp = 101
def test_duty_cycle_sp_after_reset(self):
self._param['motor'].duty_cycle_sp = 100
self.assertEqual(self._param['motor'].duty_cycle_sp, 100)
self._param['motor'].command = 'reset'
self.assertEqual(self._param['motor'].duty_cycle_sp, 0)
class TestTachoMotorMaxSpeedValue(ptc.ParameterizedTestCase):
def test_max_speed_value(self):
self.assertEqual(self._param['motor'].max_speed, motor_info[self._param['motor'].driver_name]['max_speed'])
def test_max_speed_value_is_read_only(self):
with self.assertRaises(AttributeError):
self._param['motor'].max_speed = "ThisShouldNotWork"
class TestTachoMotorPositionPValue(ptc.ParameterizedTestCase):
def test_position_p_negative(self):
with self.assertRaises(IOError):
self._param['motor'].position_p = -1
def test_position_p_zero(self):
self._param['motor'].position_p = 0
self.assertEqual(self._param['motor'].position_p, 0)
def test_position_p_positive(self):
self._param['motor'].position_p = 1
self.assertEqual(self._param['motor'].position_p, 1)
def test_position_p_after_reset(self):
self._param['motor'].position_p = 1
self._param['motor'].command = 'reset'
if self._param['hold_pid']:
expected = self._param['hold_pid']['kP']
else:
expected = motor_info[self._param['motor'].driver_name]['position_p']
self.assertEqual(self._param['motor'].position_p, expected)
class TestTachoMotorPositionIValue(ptc.ParameterizedTestCase):
def test_position_i_negative(self):
with self.assertRaises(IOError):
self._param['motor'].position_i = -1
def test_position_i_zero(self):
self._param['motor'].position_i = 0
self.assertEqual(self._param['motor'].position_i, 0)
def test_position_i_positive(self):
self._param['motor'].position_i = 1
self.assertEqual(self._param['motor'].position_i, 1)
def test_position_i_after_reset(self):
self._param['motor'].position_i = 1
self._param['motor'].command = 'reset'
if self._param['hold_pid']:
expected = self._param['hold_pid']['kI']
else:
expected = motor_info[self._param['motor'].driver_name]['position_i']
self.assertEqual(self._param['motor'].position_i, expected)
class TestTachoMotorPositionDValue(ptc.ParameterizedTestCase):
def test_position_d_negative(self):
with self.assertRaises(IOError):
self._param['motor'].position_d = -1
def test_position_d_zero(self):
self._param['motor'].position_d = 0
self.assertEqual(self._param['motor'].position_d, 0)
def test_position_d_positive(self):
self._param['motor'].position_d = 1
self.assertEqual(self._param['motor'].position_d, 1)
def test_position_d_after_reset(self):
self._param['motor'].position_d = 1
self._param['motor'].command = 'reset'
if self._param['hold_pid']:
expected = self._param['hold_pid']['kD']
else:
expected = motor_info[self._param['motor'].driver_name]['position_d']
self.assertEqual(self._param['motor'].position_d, expected)
class TestTachoMotorPolarityValue(ptc.ParameterizedTestCase):
def test_polarity_normal_value(self):
self._param['motor'].polarity = 'normal'
self.assertEqual(self._param['motor'].polarity, 'normal')
def test_polarity_inversed_value(self):
self._param['motor'].polarity = 'inversed'
self.assertEqual(self._param['motor'].polarity, 'inversed')
def test_polarity_illegal_value(self):
with self.assertRaises(IOError):
self._param['motor'].polarity = "ThisShouldNotWork"
def test_polarity_after_reset(self):
if 'normal' == motor_info[self._param['motor'].driver_name]['polarity']:
self._param['motor'].polarity = 'inversed'
else:
self._param['motor'].polarity = 'normal'
self._param['motor'].command = 'reset'
if 'normal' == motor_info[self._param['motor'].driver_name]['polarity']:
self.assertEqual(self._param['motor'].polarity, 'normal')
else:
self.assertEqual(self._param['motor'].polarity, 'inversed')
class TestTachoMotorPositionValue(ptc.ParameterizedTestCase):
def test_position_large_negative(self):
self._param['motor'].position = -1000000
self.assertEqual(self._param['motor'].position, -1000000)
def test_position_min_negative(self):
self._param['motor'].position = -1
self.assertEqual(self._param['motor'].position, -1)
def test_position_zero(self):
self._param['motor'].position = 0
self.assertEqual(self._param['motor'].position, 0)
def test_position_min_positive(self):
self._param['motor'].position = 1
self.assertEqual(self._param['motor'].position, 1)
def test_position_large_positive(self):
self._param['motor'].position = 1000000
self.assertEqual(self._param['motor'].position, 1000000)
def test_position_after_reset(self):
self._param['motor'].position = 100
self.assertEqual(self._param['motor'].position, 100)
self._param['motor'].command = 'reset'
self.assertEqual(self._param['motor'].position, 0)
class TestTachoMotorPositionSpValue(ptc.ParameterizedTestCase):
def test_position_sp_large_negative(self):
self._param['motor'].position_sp = -1000000
self.assertEqual(self._param['motor'].position_sp, -1000000)
def test_position_sp_min_negative(self):
self._param['motor'].position_sp = -1
self.assertEqual(self._param['motor'].position_sp, -1)
def test_position_sp_zero(self):
self._param['motor'].position_sp = 0
self.assertEqual(self._param['motor'].position_sp, 0)
def test_position_sp_min_positive(self):
self._param['motor'].position_sp = 1
self.assertEqual(self._param['motor'].position_sp, 1)
def test_position_sp_large_positive(self):
self._param['motor'].position_sp = 1000000
self.assertEqual(self._param['motor'].position_sp, 1000000)
def test_position_sp_after_reset(self):
self._param['motor'].position_sp = 100
self.assertEqual(self._param['motor'].position_sp, 100)
self._param['motor'].command = 'reset'
self.assertEqual(self._param['motor'].position_sp, 0)
class TestTachoMotorRampDownSpValue(ptc.ParameterizedTestCase):
def test_ramp_down_sp_negative_value(self):
with self.assertRaises(IOError):
self._param['motor'].ramp_down_sp = -1
def test_ramp_down_sp_zero(self):
self._param['motor'].ramp_down_sp = 0
self.assertEqual(self._param['motor'].ramp_down_sp, 0)
def test_ramp_down_sp_min_positive(self):
self._param['motor'].ramp_down_sp = 1
self.assertEqual(self._param['motor'].ramp_down_sp, 1)
def test_ramp_down_sp_max_positive(self):
self._param['motor'].ramp_down_sp = 60000
self.assertEqual(self._param['motor'].ramp_down_sp, 60000)
def test_ramp_down_sp_large_positive(self):
with self.assertRaises(IOError):
self._param['motor'].ramp_down_sp = 60001
def test_ramp_down_sp_after_reset(self):
self._param['motor'].ramp_down_sp = 100
self.assertEqual(self._param['motor'].ramp_down_sp, 100)
self._param['motor'].command = 'reset'
self.assertEqual(self._param['motor'].ramp_down_sp, 0)
class TestTachoMotorRampUpSpValue(ptc.ParameterizedTestCase):
    def test_ramp_up_sp_negative_value(self):
with self.assertRaises(IOError):
self._param['motor'].ramp_up_sp = -1
def test_ramp_up_sp_zero(self):
self._param['motor'].ramp_up_sp = 0
self.assertEqual(self._param['motor'].ramp_up_sp, 0)
def test_ramp_up_sp_min_positive(self):
self._param['motor'].ramp_up_sp = 1
self.assertEqual(self._param['motor'].ramp_up_sp, 1)
def test_ramp_up_sp_max_positive(self):
self._param['motor'].ramp_up_sp = 60000
self.assertEqual(self._param['motor'].ramp_up_sp, 60000)
def test_ramp_up_sp_large_positive(self):
with self.assertRaises(IOError):
self._param['motor'].ramp_up_sp = 60001
def test_ramp_up_sp_after_reset(self):
self._param['motor'].ramp_up_sp = 100
self.assertEqual(self._param['motor'].ramp_up_sp, 100)
self._param['motor'].command = 'reset'
self.assertEqual(self._param['motor'].ramp_up_sp, 0)
class TestTachoMotorSpeedValue(ptc.ParameterizedTestCase):
def test_speed_value_is_read_only(self):
with self.assertRaises(AttributeError):
self._param['motor'].speed = 1
def test_speed_value_after_reset(self):
self._param['motor'].command = 'reset'
self.assertEqual(self._param['motor'].speed, 0)
class TestTachoMotorSpeedSpValue(ptc.ParameterizedTestCase):
def test_speed_sp_large_negative(self):
with self.assertRaises(IOError):
self._param['motor'].speed_sp = -(motor_info[self._param['motor'].driver_name]['max_speed'] + 1)
def test_speed_sp_max_negative(self):
self._param['motor'].speed_sp = -motor_info[self._param['motor'].driver_name]['max_speed']
self.assertEqual(self._param['motor'].speed_sp, -motor_info[self._param['motor'].driver_name]['max_speed'])
def test_speed_sp_min_negative(self):
self._param['motor'].speed_sp = -1
self.assertEqual(self._param['motor'].speed_sp, -1)
def test_speed_sp_zero(self):
self._param['motor'].speed_sp = 0
self.assertEqual(self._param['motor'].speed_sp, 0)
def test_speed_sp_min_positive(self):
self._param['motor'].speed_sp = 1
self.assertEqual(self._param['motor'].speed_sp, 1)
def test_speed_sp_max_positive(self):
self._param['motor'].speed_sp = (motor_info[self._param['motor'].driver_name]['max_speed'])
self.assertEqual(self._param['motor'].speed_sp, motor_info[self._param['motor'].driver_name]['max_speed'])
def test_speed_sp_large_positive(self):
with self.assertRaises(IOError):
self._param['motor'].speed_sp = motor_info[self._param['motor'].driver_name]['max_speed'] + 1
def test_speed_sp_after_reset(self):
self._param['motor'].speed_sp = motor_info[self._param['motor'].driver_name]['max_speed'] / 2
self.assertEqual(self._param['motor'].speed_sp, motor_info[self._param['motor'].driver_name]['max_speed'] / 2)
self._param['motor'].command = 'reset'
self.assertEqual(self._param['motor'].speed_sp, 0)
class TestTachoMotorSpeedPValue(ptc.ParameterizedTestCase):
    def test_speed_p_negative(self):
with self.assertRaises(IOError):
self._param['motor'].speed_p = -1
def test_speed_p_zero(self):
self._param['motor'].speed_p = 0
self.assertEqual(self._param['motor'].speed_p, 0)
def test_speed_p_positive(self):
self._param['motor'].speed_p = 1
self.assertEqual(self._param['motor'].speed_p, 1)
def test_speed_p_after_reset(self):
self._param['motor'].speed_p = 1
self._param['motor'].command = 'reset'
if self._param['speed_pid']:
expected = self._param['speed_pid']['kP']
else:
expected = motor_info[self._param['motor'].driver_name]['speed_p']
self.assertEqual(self._param['motor'].speed_p, expected)
class TestTachoMotorSpeedIValue(ptc.ParameterizedTestCase):
def test_speed_i_negative(self):
with self.assertRaises(IOError):
self._param['motor'].speed_i = -1
def test_speed_i_zero(self):
self._param['motor'].speed_i = 0
self.assertEqual(self._param['motor'].speed_i, 0)
def test_speed_i_positive(self):
self._param['motor'].speed_i = 1
self.assertEqual(self._param['motor'].speed_i, 1)
def test_speed_i_after_reset(self):
self._param['motor'].speed_i = 1
self._param['motor'].command = 'reset'
if self._param['speed_pid']:
expected = self._param['speed_pid']['kI']
else:
expected = motor_info[self._param['motor'].driver_name]['speed_i']
self.assertEqual(self._param['motor'].speed_i, expected)
class TestTachoMotorSpeedDValue(ptc.ParameterizedTestCase):
def test_speed_d_negative(self):
with self.assertRaises(IOError):
self._param['motor'].speed_d = -1
def test_speed_d_zero(self):
self._param['motor'].speed_d = 0
self.assertEqual(self._param['motor'].speed_d, 0)
def test_speed_d_positive(self):
self._param['motor'].speed_d = 1
self.assertEqual(self._param['motor'].speed_d, 1)
def test_speed_d_after_reset(self):
self._param['motor'].speed_d = 1
self._param['motor'].command = 'reset'
if self._param['speed_pid']:
expected = self._param['speed_pid']['kD']
else:
expected = motor_info[self._param['motor'].driver_name]['speed_d']
self.assertEqual(self._param['motor'].speed_d, expected)
class TestTachoMotorStateValue(ptc.ParameterizedTestCase):
def test_state_value_is_read_only(self):
with self.assertRaises(AttributeError):
self._param['motor'].state = 'ThisShouldNotWork'
def test_state_value_after_reset(self):
self._param['motor'].command = 'reset'
self.assertEqual(self._param['motor'].state, [])
class TestTachoMotorStopActionValue(ptc.ParameterizedTestCase):
def test_stop_action_illegal(self):
with self.assertRaises(IOError):
self._param['motor'].stop_action = 'ThisShouldNotWork'
def test_stop_action_coast(self):
if 'coast' in self._param['stop_actions']:
self._param['motor'].stop_action = 'coast'
self.assertEqual(self._param['motor'].stop_action, 'coast')
else:
with self.assertRaises(IOError):
self._param['motor'].stop_action = 'coast'
def test_stop_action_brake(self):
if 'brake' in self._param['stop_actions']:
self._param['motor'].stop_action = 'brake'
self.assertEqual(self._param['motor'].stop_action, 'brake')
else:
with self.assertRaises(IOError):
self._param['motor'].stop_action = 'brake'
def test_stop_action_hold(self):
if 'hold' in self._param['stop_actions']:
self._param['motor'].stop_action = 'hold'
self.assertEqual(self._param['motor'].stop_action, 'hold')
else:
with self.assertRaises(IOError):
self._param['motor'].stop_action = 'hold'
def test_stop_action_after_reset(self):
action = 1
# controller may only support one stop action
if len(self._param['stop_actions']) < 2:
action = 0
self._param['motor'].stop_action = self._param['stop_actions'][action]
        self._param['motor'].command = 'reset'
self.assertEqual(self._param['motor'].stop_action, self._param['stop_actions'][0])
class TestTachoMotorStopActionsValue(ptc.ParameterizedTestCase):
def test_stop_actions_value(self):
self.assertTrue(self._param['motor'].stop_actions == self._param['stop_actions'])
def test_stop_actions_value_is_read_only(self):
with self.assertRaises(AttributeError):
self._param['motor'].stop_actions = "ThisShouldNotWork"
class TestTachoMotorTimeSpValue(ptc.ParameterizedTestCase):
def test_time_sp_negative(self):
with self.assertRaises(IOError):
self._param['motor'].time_sp = -1
def test_time_sp_zero(self):
self._param['motor'].time_sp = 0
self.assertEqual(self._param['motor'].time_sp, 0)
def test_time_sp_min_positive(self):
self._param['motor'].time_sp = 1
self.assertEqual(self._param['motor'].time_sp, 1)
def test_time_sp_large_positive(self):
self._param['motor'].time_sp = 1000000
self.assertEqual(self._param['motor'].time_sp, 1000000)
def test_time_sp_after_reset(self):
self._param['motor'].time_sp = 1
self._param['motor'].command = 'reset'
self.assertEqual(self._param['motor'].time_sp, 0)
ev3_params = {
'motor': ev3.Motor('outA'),
'port': 'outA',
'driver_name': 'lego-ev3-l-motor',
'commands': ['run-forever', 'run-to-abs-pos', 'run-to-rel-pos', 'run-timed', 'run-direct', 'stop', 'reset'],
'stop_actions': ['coast', 'brake', 'hold'],
}
evb_params = {
'motor': ev3.Motor('evb-ports:outA'),
'port': 'evb-ports:outA',
'driver_name': 'lego-ev3-l-motor',
'commands': ['run-forever', 'run-to-abs-pos', 'run-to-rel-pos', 'run-timed', 'run-direct', 'stop', 'reset'],
'stop_actions': ['coast', 'brake', 'hold'],
}
brickpi_params = {
'motor': ev3.Motor('ttyAMA0:MA'),
'port': 'ttyAMA0:MA',
'driver_name': 'lego-nxt-motor',
'commands': ['run-forever', 'run-to-abs-pos', 'run-to-rel-pos', 'run-timed', 'run-direct', 'stop', 'reset'],
'stop_actions': ['coast', 'hold'],
'speed_pid': {
'kP': 1000,
'kI': 60,
'kD': 0
},
'hold_pid': {
'kP': 20000,
'kI': 0,
'kD': 0
},
}
pistorms_params = {
'motor': ev3.Motor('pistorms:BAM1'),
'port': 'pistorms:BAM1',
'driver_name': 'lego-nxt-motor',
'commands': ['run-forever', 'run-to-abs-pos', 'run-to-rel-pos', 'run-timed', 'stop', 'reset'],
'stop_actions': ['coast', 'brake', 'hold'],
'speed_pid': {
'kP': 1000,
'kI': 60,
'kD': 0
},
'hold_pid': {
'kP': 20000,
'kI': 0,
'kD': 0
},
}
paramsA = pistorms_params
paramsA['motor'].command = 'reset'
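# Note: paramsA selects which platform the suite below exercises; swap in
# ev3_params, evb_params, or brickpi_params as appropriate. This assumes a
# motor is physically attached to the port configured in that dict.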
suite = unittest.TestSuite()
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorAddressValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorCommandsValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorCountPerRotValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorDriverNameValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorDutyCycleSpValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorMaxSpeedValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPositionPValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPositionIValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPositionDValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPolarityValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPositionValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorPositionSpValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorRampDownSpValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorRampUpSpValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorSpeedValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorSpeedSpValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorSpeedPValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorSpeedIValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorSpeedDValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorStateValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorStopActionValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorStopActionsValue, param=paramsA))
suite.addTest(ptc.ParameterizedTestCase.parameterize(TestTachoMotorTimeSpValue, param=paramsA))
if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=2, buffer=True).run(suite)
| mit | -9,058,610,688,001,949,000 | 4,213,575,744,013,751,300 | 38.705691 | 119 | 0.653794 | false |
janmtl/pypsych | tests/data/generators/eprime.py | 1 | 2106 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Script for generating mock EPrime test data
"""
import pandas as pd
import numpy as np
import io
pd.set_option('display.max_rows', 50)
pd.set_option('display.max_columns', 500)
pd.set_option('display.width', 1000)
from pypsych.config import Config
def generate_mock_eprime_data(config_path, task_name, begaze_data, sched_path):
"""Generate mock eprime data based on mock begaze data."""
superconfig = Config(path=config_path)
superconfig.load()
config = superconfig.get_subconfig(task_name, 'EPrime')
bg = begaze_data['merged_labels'][['Condition', 'ID']]
ed = np.random.randint(0, 10, (bg.shape[0], len(config['channels'])))
ep = pd.DataFrame(data=ed, index=bg.index, columns=config['channels'])
df = pd.concat([bg, ep], axis=1, join='inner')
df.rename(columns={'ID': 'Img'}, inplace=True)
result = []
for _, row in df.iterrows():
props = ["\t" + str(idx) + ': ' + str(val)
for idx, val in zip(list(row.index), list(row))]
result.append("\n\n".join(props))
result = ('\n\n\t*** LogFrame End ***\n\n'
'\tLevel: 2\n\n'
'\t*** LogFrame Start ***\n\n').join(result)
prestring = ('*** Header Start ***\n\n'
'GARBAGE\n\n'
'*** Header End ***\n\n'
'\tLevel: 2\n\n'
'\t*** LogFrame Start ***\n\n')
result = prestring + result + '\n\n\t*** LogFrame End ***'
return {'df': df, 'raw': result}
def save_mock_eprime_data(output_path, data, subject_id, task_order, task_name):
"""Save the mock eprime files to output_path."""
base_path = ''.join([output_path,
task_name,
'_',
str(subject_id),
str(task_order)])
raw_path = ''.join([base_path, '_eprime.txt'])
df_path = ''.join([base_path, '_eprime_df.txt'])
with io.open(raw_path, 'w', encoding="utf-16") as f:
f.write(unicode(data['raw']))
data['df'].to_csv(df_path, sep="\t")
pass
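if __name__ == '__main__':
    # Minimal usage sketch, not part of the pypsych pipeline: the config path,
    # task name, and the BeGaze columns below are assumptions made purely for
    # illustration; a real run would pass artifacts produced by pypsych itself.
    mock_begaze = {'merged_labels': pd.DataFrame({'Condition': ['A', 'B'],
                                                  'ID': ['img01', 'img02']})}
    mock_data = generate_mock_eprime_data('config.yaml', 'MockTask',
                                          mock_begaze, None)
    save_mock_eprime_data('./', mock_data, subject_id=101, task_order=1,
                          task_name='MockTask')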
| bsd-3-clause | 3,000,479,876,332,264,000 | 2,681,049,899,958,798,300 | 32.967742 | 80 | 0.551282 | false |
lbin/nexar-2 | lib/utils/visualization.py | 1 | 3989 | # --------------------------------------------------------
# Tensorflow Faster R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Xinlei Chen
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import PIL.Image as Image
import PIL.ImageColor as ImageColor
import PIL.ImageDraw as ImageDraw
import PIL.ImageFont as ImageFont
STANDARD_COLORS = [
'AliceBlue', 'Chartreuse', 'Aqua', 'Aquamarine', 'Azure', 'Beige', 'Bisque',
'BlanchedAlmond', 'BlueViolet', 'BurlyWood', 'CadetBlue', 'AntiqueWhite',
'Chocolate', 'Coral', 'CornflowerBlue', 'Cornsilk', 'Crimson', 'Cyan',
'DarkCyan', 'DarkGoldenRod', 'DarkGrey', 'DarkKhaki', 'DarkOrange',
'DarkOrchid', 'DarkSalmon', 'DarkSeaGreen', 'DarkTurquoise', 'DarkViolet',
'DeepPink', 'DeepSkyBlue', 'DodgerBlue', 'FireBrick', 'FloralWhite',
'ForestGreen', 'Fuchsia', 'Gainsboro', 'GhostWhite', 'Gold', 'GoldenRod',
'Salmon', 'Tan', 'HoneyDew', 'HotPink', 'IndianRed', 'Ivory', 'Khaki',
'Lavender', 'LavenderBlush', 'LawnGreen', 'LemonChiffon', 'LightBlue',
'LightCoral', 'LightCyan', 'LightGoldenRodYellow', 'LightGray', 'LightGrey',
'LightGreen', 'LightPink', 'LightSalmon', 'LightSeaGreen', 'LightSkyBlue',
'LightSlateGray', 'LightSlateGrey', 'LightSteelBlue', 'LightYellow', 'Lime',
'LimeGreen', 'Linen', 'Magenta', 'MediumAquaMarine', 'MediumOrchid',
'MediumPurple', 'MediumSeaGreen', 'MediumSlateBlue', 'MediumSpringGreen',
'MediumTurquoise', 'MediumVioletRed', 'MintCream', 'MistyRose', 'Moccasin',
'NavajoWhite', 'OldLace', 'Olive', 'OliveDrab', 'Orange', 'OrangeRed',
'Orchid', 'PaleGoldenRod', 'PaleGreen', 'PaleTurquoise', 'PaleVioletRed',
'PapayaWhip', 'PeachPuff', 'Peru', 'Pink', 'Plum', 'PowderBlue', 'Purple',
'Red', 'RosyBrown', 'RoyalBlue', 'SaddleBrown', 'Green', 'SandyBrown',
'SeaGreen', 'SeaShell', 'Sienna', 'Silver', 'SkyBlue', 'SlateBlue',
'SlateGray', 'SlateGrey', 'Snow', 'SpringGreen', 'SteelBlue', 'GreenYellow',
'Teal', 'Thistle', 'Tomato', 'Turquoise', 'Violet', 'Wheat', 'White',
'WhiteSmoke', 'Yellow', 'YellowGreen'
]
NUM_COLORS = len(STANDARD_COLORS)
try:
FONT = ImageFont.truetype('arial.ttf', 24)
except IOError:
FONT = ImageFont.load_default()
def _draw_single_box(image, xmin, ymin, xmax, ymax, display_str, font, color='black', thickness=4):
draw = ImageDraw.Draw(image)
(left, right, top, bottom) = (xmin, xmax, ymin, ymax)
draw.line([(left, top), (left, bottom), (right, bottom),
(right, top), (left, top)], width=thickness, fill=color)
text_bottom = bottom
  # Draw a filled label background anchored to the bottom edge of the box,
  # then write the label text on top of it.
text_width, text_height = font.getsize(display_str)
margin = np.ceil(0.05 * text_height)
draw.rectangle(
[(left, text_bottom - text_height - 2 * margin), (left + text_width,
text_bottom)],
fill=color)
draw.text(
(left + margin, text_bottom - text_height - margin),
display_str,
fill='black',
font=font)
return image
def draw_bounding_boxes(image, gt_boxes, im_info):
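  # Rescale the ground-truth boxes by dividing by im_info[2], draw each one on
  # the first image of the batch with an index/class label, and return the batch.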
num_boxes = gt_boxes.shape[0]
gt_boxes_new = gt_boxes.copy()
gt_boxes_new[:,:4] = np.round(gt_boxes_new[:,:4].copy() / im_info[2])
disp_image = Image.fromarray(np.uint8(image[0]))
for i in xrange(num_boxes):
this_class = int(gt_boxes_new[i, 4])
disp_image = _draw_single_box(disp_image,
gt_boxes_new[i, 0],
gt_boxes_new[i, 1],
gt_boxes_new[i, 2],
gt_boxes_new[i, 3],
'N%02d-C%02d' % (i, this_class),
FONT,
color=STANDARD_COLORS[this_class % NUM_COLORS])
image[0, :] = np.array(disp_image)
return image
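if __name__ == '__main__':
  # Quick smoke test; the blank image and the two boxes below are fabricated
  # for illustration only. Real callers pass a batched image array, ground
  # truth boxes in [x1, y1, x2, y2, class] form, and im_info = (h, w, scale).
  blank = np.zeros((1, 240, 320, 3), dtype=np.float32)
  gt = np.array([[10., 20., 150., 120., 1.],
                 [30., 40., 250., 200., 2.]])
  out = draw_bounding_boxes(blank, gt, im_info=(240, 320, 1.0))
  print(out.shape)  # (1, 240, 320, 3)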
| mit | -1,528,800,780,319,452,200 | -6,949,081,347,972,233,000 | 44.329545 | 99 | 0.598646 | false |
liangjiaxing/sympy | sympy/galgebra/stringarrays.py | 50 | 3306 | # sympy/galgebra/stringarrays.py
"""
stringarrays.py is a group of helper functions that convert string
input for the vector and multivector class functions into arrays of
SymPy symbols.
"""
import operator
from sympy.core.compatibility import reduce
from itertools import combinations
from sympy import S, Symbol, Function
from sympy.core.compatibility import range
def str_array(base, n=None):
"""
Generate one dimensional (list of strings) or two dimensional (list
of list of strings) string array.
For one dimensional arrays: -
        base is a string of variable names separated by blanks, such as
        base = 'a b c', which produces the string list ['a','b','c']; or
        it is a string with no blanks that, in conjunction with the
        integer n, generates -
str_array('v',n=-3) = ['v_1','v_2','v_3']
str_array('v',n=3) = ['v__1','v__2','v__3'].
In the case of LaTeX printing the '_' would give a subscript and
the '__' a super script.
For two dimensional arrays: -
base is string where elements are separated by spaces and rows by
commas so that -
str_array('a b,c d') = [['a','b'],['c','d']]
"""
if n is None:
if ',' in base:
base_array = []
base_split = base.split(',')
for base_arg in base_split:
base_array.append(list(filter(lambda x: x != '', base_arg.split(' '))))
return base_array
else:
return base.split(' ')
result = []
if isinstance(n, str):
if n[0] == '-':
for index in n[1:].split(' '):
result.append(base + '_' + index)
if n[0] == '+':
for index in n[1:].split(' '):
result.append(base + '__' + index)
    elif n > 0:
for i in range(1, n + 1):
result.append(base + '__' + str(i))
    elif n < 0:
for i in range(1, -n + 1):
result.append(base + '_' + str(i))
return result
def symbol_array(base, n=None):
"""
    Generates a string array with str_array and replaces each string in
    the array with a Symbol of the same name.
"""
symbol_str_lst = str_array(base, n)
result = []
for symbol_str in symbol_str_lst:
result.append(S(symbol_str))
return tuple(result)
def fct_sym_array(str_lst, coords=None):
"""
Construct list of symbols or functions with names in 'str_lst'. If
'coords' are given (tuple of symbols) function list constructed,
otherwise a symbol list is constructed.
"""
if coords is None:
fs_lst = []
for sym_str in str_lst:
fs_lst.append(Symbol(sym_str))
else:
fs_lst = []
for fct_str in str_lst:
fs_lst.append(Function(fct_str)(*coords))
return fs_lst
def str_combinations(base, lst, rank=1, mode='_'):
"""
Construct a list of strings of the form 'base+mode+indexes' where the
indexes are formed by converting 'lst' to a list of strings and then
forming the 'indexes' by concatenating combinations of elements from
'lst' taken 'rank' at a time.
"""
a1 = combinations([str(x) for x in lst], rank)
a2 = [reduce(operator.add, x) for x in a1]
str_lst = [base + mode + x for x in a2]
return str_lst
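if __name__ == '__main__':
    # Tiny self-check of the helpers above; the expected values in the
    # comments follow directly from the docstrings and are easy to verify.
    print(str_array('a b,c d'))                      # [['a', 'b'], ['c', 'd']]
    print(str_array('v', n=-3))                      # ['v_1', 'v_2', 'v_3']
    print(symbol_array('x', n=2))                    # (x__1, x__2)
    print(str_combinations('A', [1, 2, 3], rank=2))  # ['A_12', 'A_13', 'A_23']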
| bsd-3-clause | 7,601,455,371,532,232,000 | -8,384,795,817,120,792,000 | 29.330275 | 87 | 0.58046 | false |
DXCanas/content-curation | contentcuration/contentcuration/viewsets/channelset.py | 1 | 4021 | from django.core.exceptions import PermissionDenied
from django.db.models import CharField
from django.db.models import Q
from rest_framework import serializers
from rest_framework.permissions import IsAuthenticated
from contentcuration.models import Channel
from contentcuration.models import ChannelSet
from contentcuration.viewsets.base import BulkListSerializer
from contentcuration.viewsets.base import BulkModelSerializer
from contentcuration.viewsets.base import ValuesViewset
from contentcuration.viewsets.common import DistinctNotNullArrayAgg
from contentcuration.viewsets.sync.constants import CHANNELSET
from contentcuration.viewsets.sync.utils import generate_update_event
class ChannelSetSerializer(BulkModelSerializer):
channels = serializers.PrimaryKeyRelatedField(
many=True, queryset=Channel.objects.all()
)
def validate_channels(self, value):
"""
Check that the user has permission to view these channels
"""
try:
# Some users might not want to add channels right away
if value:
self.context["request"].user.can_view_channel_ids([v.pk for v in value])
except (PermissionDenied, AttributeError, KeyError):
raise serializers.ValidationError(
"User does not have permission to view these channels"
)
return value
def create(self, validated_data):
channels = validated_data.pop("channels", [])
if "request" in self.context:
user_id = self.context["request"].user.id
# This has been newly created so add the current user as an editor
validated_data["editors"] = [user_id]
instance = super(ChannelSetSerializer, self).create(validated_data)
for channel in channels:
instance.secret_token.channels.add(channel)
instance.secret_token.save()
self.changes.append(
generate_update_event(
instance.id, CHANNELSET, {"secret_token": instance.secret_token.token},
)
)
return instance
def update(self, instance, validated_data):
channels = validated_data.pop("channels", [])
for channel in channels:
instance.secret_token.channels.add(channel)
instance.secret_token.save()
return super(ChannelSetSerializer, self).update(instance, validated_data)
class Meta:
model = ChannelSet
fields = ("id", "name", "description", "channels")
read_only_fields = ("id",)
list_serializer_class = BulkListSerializer
def clean_channels(item):
return filter(lambda x: x is not None, item["channels"])
class ChannelSetViewSet(ValuesViewset):
queryset = ChannelSet.objects.all()
serializer_class = ChannelSetSerializer
permission_classes = [IsAuthenticated]
values = ("id", "name", "description", "channels", "secret_token__token")
field_map = {"secret_token": "secret_token__token", "channels": clean_channels}
def get_queryset(self):
queryset = ChannelSet.objects.prefetch_related("secret_token").filter(
id__in=ChannelSet.objects.filter(editors=self.request.user)
.distinct()
.values_list("id", flat=True)
)
queryset = queryset.annotate(
channels=DistinctNotNullArrayAgg(
"secret_token__channels__id",
filter=Q(main_tree__published=True, deleted=False),
output_field=CharField(),
)
)
return queryset
def prefetch_queryset(self, queryset):
queryset = queryset.select_related("secret_token")
return queryset
class PublicChannelSetSerializer(BulkModelSerializer):
count = serializers.SerializerMethodField()
def get_count(self, value):
return value.count
class Meta:
model = ChannelSet
fields = ("id", "name", "description", "count")
read_only_fields = ("id", "name", "description", "count")
| mit | -4,811,452,216,481,385,000 | 7,747,429,954,241,493,000 | 35.554545 | 88 | 0.66476 | false |
wemanuel/smry | server-auth/ls/google-cloud-sdk/platform/gsutil/third_party/boto/boto/ec2/elb/securitygroup.py | 152 | 1576 | # Copyright (c) 2010 Reza Lotun http://reza.lotun.name
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class SecurityGroup(object):
def __init__(self, connection=None):
self.name = None
self.owner_alias = None
def __repr__(self):
return 'SecurityGroup(%s, %s)' % (self.name, self.owner_alias)
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'GroupName':
self.name = value
elif name == 'OwnerAlias':
self.owner_alias = value
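if __name__ == '__main__':
    # Illustration only: boto's XML response parser normally drives this class
    # by calling startElement/endElement for each tag it encounters; the
    # element names below mirror the ELB DescribeLoadBalancers response.
    sg = SecurityGroup()
    sg.endElement('GroupName', 'my-elb-sg', None)
    sg.endElement('OwnerAlias', 'amazon-elb', None)
    print(repr(sg))  # SecurityGroup(my-elb-sg, amazon-elb)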
| apache-2.0 | -2,342,241,235,410,463,000 | 5,356,926,334,182,158,000 | 40.473684 | 74 | 0.712563 | false |
kargakis/test-infra | gubernator/github/main_test.py | 19 | 5233 | #!/usr/bin/env python
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=no-self-use
"""
To run these tests:
$ pip install webtest nosegae
$ nosetests --with-gae --gae-lib-root ~/google_appengine/
"""
import json
import unittest
import webtest
from google.appengine.ext import deferred
from google.appengine.ext import testbed
import handlers
import main
import models
import secrets
app = webtest.TestApp(main.app)
class TestBase(unittest.TestCase):
def init_stubs(self):
self.testbed.init_memcache_stub()
self.testbed.init_app_identity_stub()
self.testbed.init_urlfetch_stub()
self.testbed.init_blobstore_stub()
self.testbed.init_datastore_v3_stub()
self.testbed.init_taskqueue_stub()
class AppTest(TestBase):
def setUp(self):
self.init_stubs()
self.taskqueue = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
secrets.put('github_webhook_secret', 'some_secret', per_host=False)
def get_response(self, event, body):
if isinstance(body, dict):
body = json.dumps(body)
signature = handlers.make_signature(body)
resp = app.post('/webhook', body,
{'X-Github-Event': event,
'X-Hub-Signature': signature})
for task in self.taskqueue.get_filtered_tasks():
deferred.run(task.payload)
return resp
def test_webhook(self):
self.get_response('test', {'action': 'blah'})
hooks = list(models.GithubWebhookRaw.query())
self.assertEqual(len(hooks), 1)
self.assertIsNotNone(hooks[0].timestamp)
def test_webhook_bad_sig(self):
body = json.dumps({'action': 'blah'})
signature = handlers.make_signature(body + 'foo')
app.post('/webhook', body,
{'X-Github-Event': 'test',
'X-Hub-Signature': signature}, status=400)
def test_webhook_missing_sig(self):
app.post('/webhook', '{}',
{'X-Github-Event': 'test'}, status=400)
def test_webhook_unicode(self):
self.get_response('test', {'action': u'blah\u03BA'})
def test_webhook_status(self):
args = {
'name': 'owner/repo',
'sha': '1234',
'context': 'ci',
'state': 'success',
'target_url': 'http://example.com',
'description': 'passed the tests!',
'created_at': '2016-07-07T01:58:09Z',
'updated_at': '2016-07-07T02:03:12Z',
}
self.get_response('status', args)
statuses = list(models.GHStatus.query_for_sha('owner/repo', '1234'))
self.assertEqual(len(statuses), 1)
status = statuses[0]
args['repo'] = args.pop('name')
for key, value in args.iteritems():
status_val = getattr(status, key)
try:
status_val = status_val.strftime('%Y-%m-%dT%H:%M:%SZ')
except AttributeError:
pass
assert status_val == value, '%r != %r' % (getattr(status, key), value)
PR_EVENT_BODY = {
'repository': {'full_name': 'test/test'},
'pull_request': {
'number': 123,
'head': {'sha': 'cafe'},
'updated_at': '2016-07-07T02:03:12+00:00',
'state': 'open',
'user': {'login': 'rmmh'},
'assignees': [{'login': 'spxtr'}],
'title': 'test pr',
},
'action': 'opened',
}
def test_webhook_pr_open(self):
body = json.dumps(self.PR_EVENT_BODY)
self.get_response('pull_request', body)
digest = models.GHIssueDigest.get('test/test', 123)
self.assertTrue(digest.is_pr)
self.assertTrue(digest.is_open)
self.assertEqual(digest.involved, ['rmmh', 'spxtr'])
self.assertEqual(digest.payload['title'], 'test pr')
self.assertEqual(digest.payload['needs_rebase'], False)
def test_webhook_pr_open_and_status(self):
self.get_response('pull_request', self.PR_EVENT_BODY)
self.get_response('status', {
'repository': self.PR_EVENT_BODY['repository'],
'name': self.PR_EVENT_BODY['repository']['full_name'],
'sha': self.PR_EVENT_BODY['pull_request']['head']['sha'],
'context': 'test-ci',
'state': 'success',
'target_url': 'example.com',
'description': 'woop!',
'created_at': '2016-07-07T01:58:09Z',
'updated_at': '2016-07-07T02:03:15Z',
})
digest = models.GHIssueDigest.get('test/test', 123)
self.assertEqual(digest.payload['status'],
{'test-ci': ['success', 'example.com', 'woop!']})
| apache-2.0 | 4,927,030,520,406,819,000 | -6,793,621,159,625,638,000 | 33.427632 | 82 | 0.589337 | false |