# ============================================================
# File: pacal/vartransforms.py   (repo: jszymon/pacal)
# ============================================================
"""Variable transforms. Used for mapping to infinite intervals etc."""
from __future__ import print_function
from numpy import Inf
from numpy import hypot, sqrt, sign
from numpy import array, asfarray, empty_like, isscalar, all, equal
class VarTransform(object):
"""Base class for variable transforms."""
def inv_var_change_with_mask(self, t):
eq = equal.outer(t, self.var_inf)
mask = ~eq.any(axis=-1)
if (~mask).any():
if isscalar(t):
x = 0 # t is fully masked; can pick any value, use 0
else:
t = asfarray(t)
x = empty_like(t)
x[mask] = self.inv_var_change(t[mask])
else:
x = self.inv_var_change(t)
return x, mask
def apply_with_inv_transform(self, f, t, def_val = 0, mul_by_deriv = False):
"""Apply function f to vartransform of t.
Accepts vector inputs. Values at infinity are set to def_val."""
x, mask = self.inv_var_change_with_mask(t)
if (~mask).any():
if isscalar(x):
y = def_val
else:
y = empty_like(x)
y[mask] = f(x[mask])
if mul_by_deriv:
y[mask] *= self.inv_var_change_deriv(t[mask])
y[~mask] = def_val
else:
y = f(x)
if mul_by_deriv:
y *= self.inv_var_change_deriv(t)
return y
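# --- illustrative sketch, not part of the original module ---
# How the masking in apply_with_inv_transform() behaves at the endpoints:
# parameter values listed in var_inf correspond to +/-oo, so f is never
# evaluated there and def_val is substituted instead.
def _demo_apply_with_inv_transform():
    vt = VarTransformAlgebraic_PMInf()
    y = vt.apply_with_inv_transform(lambda x: 1.0 / (1.0 + x * x),
                                    array([-1.0, 0.0, 1.0]), def_val=0.0)
    return y  # array([0.0, 1.0, 0.0])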
class VarTransformIdentity(VarTransform):
"""The identity transform."""
def var_change(self, x):
return x
def inv_var_change(self, t):
return t
def inv_var_change_deriv(self, t):
return 1.0
var_min = -1.0
var_max = +1.0
var_inf = [] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
### Variable transforms
class VarTransformReciprocal_PMInf(VarTransform):
"""Reciprocal variable transform."""
def __init__(self, exponent = 1):
self.exponent = exponent
def var_change(self, x):
#if x > 0:
# t = x / (x + 1.0)
#else:
# t = x / (1.0 - x)
t = x / (1.0 + abs(x))
return t
def inv_var_change(self, t):
#if t > 0:
# x = t / (1.0 - t)
#else:
# x = t / (1.0 + t)
x = t / (1.0 - abs(t))
return x
def inv_var_change_deriv(self, t):
return 1.0 / ((1.0 - abs(t)) * (1.0 - abs(t)))
var_min = -1.0
var_max = +1.0
var_inf = [-1.0, +1.0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
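# --- illustrative sketch, not part of the original module ---
# Round trip through the reciprocal transform above: x -> t -> x for
# plain float inputs; t always lands in the open interval (-1, 1).
def _demo_reciprocal_pminf_roundtrip():
    vt = VarTransformReciprocal_PMInf()
    for x in (-10.0, -0.5, 0.0, 3.0):
        t = vt.var_change(x)
        assert abs(vt.inv_var_change(t) - x) < 1e-12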
class VarTransformReciprocal_PInf(VarTransform):
"""Reciprocal variable transform.
Optionally an exponent different from 1 can be specified. If U is
given, then the transform maps onto the finite interval [L, U]."""
def __init__(self, L = 0, exponent = 1, U = None):
self.exponent = exponent
self.L = L
self.U = U
if self.L == 0:
self.offset = 1.0
else:
self.offset = abs(self.L) / 2
if U is not None:
self.var_min = self.var_change(U)
self.var_inf = []
def var_change(self, x):
#assert all(x >= self.L)
if self.exponent == 1:
t = self.offset / (x - self.L + self.offset)
elif self.exponent == 2:
t = sqrt(self.offset / (x - self.L + self.offset))
else:
t = (self.offset / (x - self.L + self.offset))**(1.0/self.exponent)
return t
def inv_var_change(self, t):
if self.exponent == 1:
x = self.L - self.offset + self.offset / t
else:
x = self.L - self.offset + self.offset / t**self.exponent
return x
def inv_var_change_deriv(self, t):
if self.exponent == 1:
der = self.offset / (t * t)
else:
der = self.offset * float(self.exponent) / t**(self.exponent + 1)
return der
var_min = 0
var_max = 1
var_inf = [0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
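# --- illustrative sketch, not part of the original module ---
# The transform above maps [L, +oo) onto (0, 1]: x = L goes to t = 1 and
# x -> +oo corresponds to t -> 0 (hence var_inf == [0]).
def _demo_reciprocal_pinf():
    vt = VarTransformReciprocal_PInf(L=10.0)
    assert vt.var_change(10.0) == 1.0
    assert abs(vt.inv_var_change(vt.var_change(25.0)) - 25.0) < 1e-9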
class VarTransformReciprocal_MInf(VarTransform):
"""Reciprocal variable transform.
Optionally an exponent different from 1 can be specified. If L is
given, then the transform maps onto the finite interval [L, U]."""
def __init__(self, U = 0, exponent = 1, L = None):
self.exponent = exponent
self.L = L
self.U = U
if self.U == 0:
self.offset = 1.0
else:
self.offset = abs(self.U) / 2
if L is not None:
self.var_min = self.var_change(L)
self.var_inf = []
def var_change(self, x):
#assert all(x <= self.U)
if self.exponent == 1:
t = -self.offset / (x - self.U - self.offset)
elif self.exponent == 2:
t = sqrt(-self.offset / (x - self.U - self.offset))
else:
t = (self.offset / abs(x - self.U - self.offset))**(1.0/self.exponent)
return t
def inv_var_change(self, t):
if self.exponent == 1:
x = self.U + self.offset - self.offset / t
elif self.exponent == 2:
x = self.U + self.offset - self.offset / (t*t)
else:
x = self.U + self.offset - self.offset / t**self.exponent
return x
def inv_var_change_deriv(self, t):
if self.exponent == 1:
der = self.offset / (t * t)
else:
der = self.offset * float(self.exponent) / t**(self.exponent + 1)
return der
var_min = 0
var_max = 1
var_inf = [0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
# variable transforms suggested by Boyd
class VarTransformAlgebraic_PMInf(VarTransform):
"""Variable transform suggested by Boyd.
Leads to Chebyshev rational functions."""
def __init__(self, c = 1):
self.c = c # this corresponds to Boyd's L param
def var_change(self, x):
t = x / hypot(self.c, x)
return t
def inv_var_change(self, t):
x = self.c * t / sqrt(1.0 - t*t)
return x
def inv_var_change_deriv(self, t):
t2 = t * t
der = t2 / sqrt((1.0 - t2)**3) + 1.0 / sqrt(1.0 - t2)
return self.c * der
var_min = -1.0
var_max = +1.0
var_inf = [-1.0, +1.0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
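# --- illustrative sketch, not part of the original module ---
# Boyd's algebraic map sends x to x/hypot(c, x), so t -> +/-1 as
# x -> +/-oo; the inverse below recovers x exactly away from the endpoints.
def _demo_algebraic_pminf():
    vt = VarTransformAlgebraic_PMInf(c=2.0)
    t = vt.var_change(2.0)  # 2/hypot(2, 2) == 1/sqrt(2)
    assert abs(vt.inv_var_change(t) - 2.0) < 1e-12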
class VarTransformAlgebraic_PInf(VarTransform):
"""Variable transform suggested by Boyd."""
def __init__(self, L = 0, c = 1):
self.L = float(L) # lower bound
self.c = c # this corresponds to Boyd's L param
def var_change(self, x):
#assert all(x >= self.L)
if not all(x >= self.L):
print("assert all(x >= self.L) failed")
print(x)
print(x < self.L)
t = (x - self.L - self.c) / (x - self.L + self.c)
return t
def inv_var_change(self, t):
x = self.L + self.c * (1.0 + t) / (1.0 - t)
return x
def inv_var_change_deriv(self, t):
der = 2.0 * self.c / (1.0 - t)**2
return der
var_min = -1.0
var_max = +1.0
var_inf = [+1.0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
class VarTransformAlgebraic_MInf(VarTransform):
"""Variable transform suggested by Boyd."""
def __init__(self, U = 0, c = 1):
self.U = float(U) # upper bound
self.c = c # this corresponds to Boyd's L param
def var_change(self, x):
#assert all(x <= self.U)
if not all(x <= self.U):
print("assert all(x <= self.U) failed")
print(x)
print(x > self.U)
t = (-(x - self.U) - self.c) / (-(x - self.U) + self.c)
return t
def inv_var_change(self, t):
x = self.U - self.c * (1.0 + t) / (1.0 - t)
return x
def inv_var_change_deriv(self, t):
der = 2.0 * self.c / (1.0 - t)**2
return der
var_min = -1.0
var_max = +1.0
var_inf = [+1.0] # parameter values corresponding to infinity. Do
# not distinguish +oo and -oo
def plot_transformed(f, vt):
"""A debugging plot of f under variable transfom vt."""
from pylab import plot, show, linspace
T = linspace(vt.var_min, vt.var_max, 1000)
Y = [f(vt.inv_var_change(t)) if t not in vt.var_inf else 0 for t in T]
plot(T, Y, linewidth=5)
def plot_transformed_w_deriv(f, vt):
"""A debugging plot of f under variable transfom vt including the
derivative of inverse transform."""
from pylab import plot, show, linspace
T = linspace(vt.var_min, vt.var_max, 1000)
Y = [f(vt.inv_var_change(t))*vt.inv_var_change_deriv(t) if t not in vt.var_inf else 0 for t in T]
plot(T, Y, linewidth=5)
def plot_invtransformed_tail(f, vt):
from pylab import loglog, show, logspace
X = logspace(1, 50, 1000)
Y = f(vt.var_change(X))
loglog(X, Y)
if __name__ == "__main__":
vt = VarTransformAlgebraic_PMInf()
print(vt.inv_var_change_with_mask(array([-1,0,1])))
print(vt.inv_var_change_with_mask(-1))
print(vt.apply_with_inv_transform(lambda x: x+1, array([-1,0,1])))
print(vt.apply_with_inv_transform(lambda x: x+1, 0))
print(vt.apply_with_inv_transform(lambda x: x+1, -1))
from numpy import exp
from pylab import show
#plot_transformed(lambda x: 1.0/(1+x*x), VarTransformAlgebraic_PInf(1))
#plot_transformed(lambda x: exp(-x*x), VarTransformAlgebraic_PMInf())
#plot_transformed_w_deriv(lambda x: 1.0/(1+x*x), VarTransformAlgebraic_PMInf())
#plot_transformed_w_deriv(lambda x: exp(-x*x), VarTransformAlgebraic_PMInf())
#plot_transformed(lambda x: 1.0/(1+x*x), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: exp(-x*x), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: 1.0/(1+x**1.0), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: 1.0/(1+x**1.2), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: 1.0/(1+x**1.5), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: 1.0/(1+x**2.0), VarTransformReciprocal_PInf())
#plot_transformed(lambda x: 1.0/(1+x**2.0), VarTransformIdentity())
#plot_transformed(lambda x: 1.0/(1+x**2.0), VarTransformReciprocal_PInf(U = 2))
#plot_transformed(lambda x: 1.0/(1+x**2.0), VarTransformReciprocal_MInf())
#plot_transformed(lambda x: 1.0/(1+x**2.0), VarTransformReciprocal_MInf(L = -2))
plot_invtransformed_tail(lambda x: x, VarTransformReciprocal_PInf(L = 10))
plot_invtransformed_tail(lambda x: 1-x, VarTransformAlgebraic_PInf(L = 10))
show()
# (license: gpl-3.0)

# ============================================================
# File: viper/modules/pymacho/MachOEncryptionInfoCommand.py   (repo: cwtaylor/viper)
# ============================================================
# encoding: utf-8
"""
Copyright 2013 Jérémie BOUTOILLE
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from struct import pack, unpack
from viper.modules.pymacho.MachOLoadCommand import MachOLoadCommand
from viper.modules.pymacho.Utils import green
class MachOEncryptionInfoCommand(MachOLoadCommand):
cryptoff = 0
cryptsize = 0
cryptid = 0
def __init__(self, macho_file=None, cmd=0):
self.cmd = cmd
if macho_file is not None:
self.parse(macho_file)
def parse(self, macho_file):
self.cryptoff, self.cryptsize = unpack('<II', macho_file.read(4*2))
self.cryptid = unpack('<I', macho_file.read(4))[0]
def write(self, macho_file):
before = macho_file.tell()
macho_file.write(pack('<II', self.cmd, 0x0))
macho_file.write(pack('<III', self.cryptoff, self.cryptsize, self.cryptid))
after = macho_file.tell()
macho_file.seek(before+4)
macho_file.write(pack('<I', after-before))
macho_file.seek(after)
def display(self, before=''):
print before + green("[+]")+" LC_ENCRYPTION_INFO"
print before + "\t- cryptoff : 0x%x" % self.cryptoff
print before + "\t- cryptsize : 0x%x" % self.cryptsize
print before + "\t- crypptid : 0x%x" % self.cryptid
# (license: bsd-3-clause)

# ============================================================
# File: bin/create_emoji_js.py   (repo: node-modules/emoji)
# ============================================================
#!/usr/bin/python
# -*- coding: utf-8 -*-
# install pyquery first: $ sudo easy_install pyquery
import os
from pyquery import PyQuery as pq
project_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# d = pq(url='https://raw.github.com/iamcal/php-emoji/master/table.htm')
d = pq(open(project_dir + '/lib/table.htm', 'rb').read())
tr = d('table tr')
content = open(project_dir + '/tpl/emoji_header.js', 'rb').read().decode('utf-8')
for tre in tr[1:]:
tds = pq(tre)('td')
# val, name, Unified DoCoMo KDDI Softbank Google
item = ['', '', '', '', '', '', '']
for index, tde in enumerate(tds):
td = pq(tde)
# <td><span class="emoji emoji2320e3"></span></td>
if index == 0:
val = td('span').attr('class')[11:].decode('utf-8')
else:
val = td.text().decode('utf-8')
source = val
if index != 1 and val != '-':
# convert to str
val = val[2:]
val = val.split(' U+')
val[0] = (r'\U' + '0' * (8 - len(val[0])) + val[0].lower()).decode('unicode-escape')
if len(val) > 1:
val[1] = (r'\U' + '0' * (8 - len(val[1])) + val[1].lower()).decode('unicode-escape')
val = val[0] + val[1]
else:
val = val[0]
if index > 1:
val = [val, source]
item[index] = val
# print item
# unified: [unified_unicode, name, classname, docomo, kddi, softbank, google]
content += u' "' + item[2][0] + '": ["' + item[2][1] + '", "' + item[1] + '", "' + item[0] + '", ["' \
+ item[3][0] + '", "' + item[3][1] + '"], ["' \
+ item[4][0] + '", "' + item[4][1] + '"], ["' \
+ item[5][0] + '", "' + item[5][1] + '"], ["' \
+ item[6][0] + '", "' + item[6][1] + '"]],\n'
content = content[:-2] + u'\n};\n\n'
content += open(project_dir + '/tpl/emoji_footer.js', 'rb').read().decode('utf-8')
f = open(project_dir + '/lib/emoji.js', 'wb')
f.write(content.encode('utf-8'))
f.close()
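# --- illustrative sketch, not part of the original script ---
# Stand-alone equivalent of the codepoint conversion used in the loop
# above: a 'U+XXXX' style hex string becomes the actual character (Python 2,
# matching the script).
def _demo_codepoint_to_char(cp='1F604'):
    return (r'\U' + '0' * (8 - len(cp)) + cp.lower()).decode('unicode-escape')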
# (license: mit)

# ============================================================
# File: markdownpreview_lib/pygments/styles/bw.py   (repo: 5t111111/markdown-preview.vim)
# ============================================================
# -*- coding: utf-8 -*-
"""
pygments.styles.bw
~~~~~~~~~~~~~~~~~~
Simple black/white only style.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Operator, Generic
class BlackWhiteStyle(Style):
background_color = "#ffffff"
default_style = ""
styles = {
Comment: "italic",
Comment.Preproc: "noitalic",
Keyword: "bold",
Keyword.Pseudo: "nobold",
Keyword.Type: "nobold",
Operator.Word: "bold",
Name.Class: "bold",
Name.Namespace: "bold",
Name.Exception: "bold",
Name.Entity: "bold",
Name.Tag: "bold",
String: "italic",
String.Interpol: "bold",
String.Escape: "bold",
Generic.Heading: "bold",
Generic.Subheading: "bold",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold",
Error: "border:#FF0000"
}
# (license: lgpl-2.1)

# ============================================================
# File: scrapper/lib/python2.7/site-packages/twisted/conch/insults/helper.py   (repo: mhnatiuk/phd_sociology_of_religion)
# ============================================================
# -*- test-case-name: twisted.conch.test.test_helper -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Partial in-memory terminal emulator
@author: Jp Calderone
"""
import re, string
from zope.interface import implements
from twisted.internet import defer, protocol, reactor
from twisted.python import log, _textattributes
from twisted.python.deprecate import deprecated, deprecatedModuleAttribute
from twisted.python.versions import Version
from twisted.conch.insults import insults
FOREGROUND = 30
BACKGROUND = 40
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, N_COLORS = range(9)
class _FormattingState(_textattributes._FormattingStateMixin):
"""
Represents the formatting state/attributes of a single character.
Character set, intensity, underlinedness, blinkitude, video
reversal, as well as foreground and background colors made up a
character's attributes.
"""
compareAttributes = (
'charset', 'bold', 'underline', 'blink', 'reverseVideo', 'foreground',
'background', '_subtracting')
def __init__(self, charset=insults.G0, bold=False, underline=False,
blink=False, reverseVideo=False, foreground=WHITE,
background=BLACK, _subtracting=False):
self.charset = charset
self.bold = bold
self.underline = underline
self.blink = blink
self.reverseVideo = reverseVideo
self.foreground = foreground
self.background = background
self._subtracting = _subtracting
@deprecated(Version('Twisted', 13, 1, 0))
def wantOne(self, **kw):
"""
Add a character attribute to a copy of this formatting state.
@param **kw: An optional attribute name and value can be provided with
a keyword argument.
@return: A formatting state instance with the new attribute.
@see: L{DefaultFormattingState._withAttribute}.
"""
k, v = kw.popitem()
return self._withAttribute(k, v)
def toVT102(self):
# Spit out a vt102 control sequence that will set up
# all the attributes set here. Except charset.
attrs = []
if self._subtracting:
attrs.append(0)
if self.bold:
attrs.append(insults.BOLD)
if self.underline:
attrs.append(insults.UNDERLINE)
if self.blink:
attrs.append(insults.BLINK)
if self.reverseVideo:
attrs.append(insults.REVERSE_VIDEO)
if self.foreground != WHITE:
attrs.append(FOREGROUND + self.foreground)
if self.background != BLACK:
attrs.append(BACKGROUND + self.background)
if attrs:
return '\x1b[' + ';'.join(map(str, attrs)) + 'm'
return ''
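# --- illustrative sketch, not part of the original module ---
# A formatting state serializes to a single SGR escape sequence; with
# bold and a red foreground this is expected to yield '\x1b[1;31m'.
def _demoFormattingState():
    state = _FormattingState(bold=True, foreground=RED)
    return state.toVT102()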
CharacterAttribute = _FormattingState
deprecatedModuleAttribute(
Version('Twisted', 13, 1, 0),
'Use twisted.conch.insults.text.assembleFormattedText instead.',
'twisted.conch.insults.helper',
'CharacterAttribute')
# XXX - need to support scroll regions and scroll history
class TerminalBuffer(protocol.Protocol):
"""
An in-memory terminal emulator.
"""
implements(insults.ITerminalTransport)
for keyID in ('UP_ARROW', 'DOWN_ARROW', 'RIGHT_ARROW', 'LEFT_ARROW',
'HOME', 'INSERT', 'DELETE', 'END', 'PGUP', 'PGDN',
'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9',
'F10', 'F11', 'F12'):
exec '%s = object()' % (keyID,)
TAB = '\t'
BACKSPACE = '\x7f'
width = 80
height = 24
fill = ' '
void = object()
def getCharacter(self, x, y):
return self.lines[y][x]
def connectionMade(self):
self.reset()
def write(self, bytes):
"""
Add the given printable bytes to the terminal.
Line feeds in C{bytes} will be replaced with carriage return / line
feed pairs.
"""
for b in bytes.replace('\n', '\r\n'):
self.insertAtCursor(b)
def _currentFormattingState(self):
return _FormattingState(self.activeCharset, **self.graphicRendition)
def insertAtCursor(self, b):
"""
Add one byte to the terminal at the cursor and make consequent state
updates.
If b is a carriage return, move the cursor to the beginning of the
current row.
If b is a line feed, move the cursor to the next row or scroll down if
the cursor is already in the last row.
Otherwise, if b is printable, put it at the cursor position (inserting
or overwriting as dictated by the current mode) and move the cursor.
"""
if b == '\r':
self.x = 0
elif b == '\n':
self._scrollDown()
elif b in string.printable:
if self.x >= self.width:
self.nextLine()
ch = (b, self._currentFormattingState())
if self.modes.get(insults.modes.IRM):
self.lines[self.y][self.x:self.x] = [ch]
self.lines[self.y].pop()
else:
self.lines[self.y][self.x] = ch
self.x += 1
def _emptyLine(self, width):
return [(self.void, self._currentFormattingState())
for i in xrange(width)]
def _scrollDown(self):
self.y += 1
if self.y >= self.height:
self.y -= 1
del self.lines[0]
self.lines.append(self._emptyLine(self.width))
def _scrollUp(self):
self.y -= 1
if self.y < 0:
self.y = 0
del self.lines[-1]
self.lines.insert(0, self._emptyLine(self.width))
def cursorUp(self, n=1):
self.y = max(0, self.y - n)
def cursorDown(self, n=1):
self.y = min(self.height - 1, self.y + n)
def cursorBackward(self, n=1):
self.x = max(0, self.x - n)
def cursorForward(self, n=1):
self.x = min(self.width, self.x + n)
def cursorPosition(self, column, line):
self.x = column
self.y = line
def cursorHome(self):
self.x = self.home.x
self.y = self.home.y
def index(self):
self._scrollDown()
def reverseIndex(self):
self._scrollUp()
def nextLine(self):
"""
Update the cursor position attributes and scroll down if appropriate.
"""
self.x = 0
self._scrollDown()
def saveCursor(self):
self._savedCursor = (self.x, self.y)
def restoreCursor(self):
self.x, self.y = self._savedCursor
del self._savedCursor
def setModes(self, modes):
for m in modes:
self.modes[m] = True
def resetModes(self, modes):
for m in modes:
try:
del self.modes[m]
except KeyError:
pass
def setPrivateModes(self, modes):
"""
Enable the given modes.
Track which modes have been enabled so that the implementations of
other L{insults.ITerminalTransport} methods can be properly implemented
to respect these settings.
@see: L{resetPrivateModes}
@see: L{insults.ITerminalTransport.setPrivateModes}
"""
for m in modes:
self.privateModes[m] = True
def resetPrivateModes(self, modes):
"""
Disable the given modes.
@see: L{setPrivateModes}
@see: L{insults.ITerminalTransport.resetPrivateModes}
"""
for m in modes:
try:
del self.privateModes[m]
except KeyError:
pass
def applicationKeypadMode(self):
self.keypadMode = 'app'
def numericKeypadMode(self):
self.keypadMode = 'num'
def selectCharacterSet(self, charSet, which):
self.charsets[which] = charSet
def shiftIn(self):
self.activeCharset = insults.G0
def shiftOut(self):
self.activeCharset = insults.G1
def singleShift2(self):
oldActiveCharset = self.activeCharset
self.activeCharset = insults.G2
f = self.insertAtCursor
def insertAtCursor(b):
f(b)
del self.insertAtCursor
self.activeCharset = oldActiveCharset
self.insertAtCursor = insertAtCursor
def singleShift3(self):
oldActiveCharset = self.activeCharset
self.activeCharset = insults.G3
f = self.insertAtCursor
def insertAtCursor(b):
f(b)
del self.insertAtCursor
self.activeCharset = oldActiveCharset
self.insertAtCursor = insertAtCursor
def selectGraphicRendition(self, *attributes):
for a in attributes:
if a == insults.NORMAL:
self.graphicRendition = {
'bold': False,
'underline': False,
'blink': False,
'reverseVideo': False,
'foreground': WHITE,
'background': BLACK}
elif a == insults.BOLD:
self.graphicRendition['bold'] = True
elif a == insults.UNDERLINE:
self.graphicRendition['underline'] = True
elif a == insults.BLINK:
self.graphicRendition['blink'] = True
elif a == insults.REVERSE_VIDEO:
self.graphicRendition['reverseVideo'] = True
else:
try:
v = int(a)
except ValueError:
log.msg("Unknown graphic rendition attribute: " + repr(a))
else:
if FOREGROUND <= v <= FOREGROUND + N_COLORS:
self.graphicRendition['foreground'] = v - FOREGROUND
elif BACKGROUND <= v <= BACKGROUND + N_COLORS:
self.graphicRendition['background'] = v - BACKGROUND
else:
log.msg("Unknown graphic rendition attribute: " + repr(a))
def eraseLine(self):
self.lines[self.y] = self._emptyLine(self.width)
def eraseToLineEnd(self):
width = self.width - self.x
self.lines[self.y][self.x:] = self._emptyLine(width)
def eraseToLineBeginning(self):
self.lines[self.y][:self.x + 1] = self._emptyLine(self.x + 1)
def eraseDisplay(self):
self.lines = [self._emptyLine(self.width) for i in xrange(self.height)]
def eraseToDisplayEnd(self):
self.eraseToLineEnd()
height = self.height - self.y - 1
self.lines[self.y + 1:] = [self._emptyLine(self.width) for i in range(height)]
def eraseToDisplayBeginning(self):
self.eraseToLineBeginning()
self.lines[:self.y] = [self._emptyLine(self.width) for i in range(self.y)]
def deleteCharacter(self, n=1):
del self.lines[self.y][self.x:self.x+n]
self.lines[self.y].extend(self._emptyLine(min(self.width - self.x, n)))
def insertLine(self, n=1):
self.lines[self.y:self.y] = [self._emptyLine(self.width) for i in range(n)]
del self.lines[self.height:]
def deleteLine(self, n=1):
del self.lines[self.y:self.y+n]
self.lines.extend([self._emptyLine(self.width) for i in range(n)])
def reportCursorPosition(self):
return (self.x, self.y)
def reset(self):
self.home = insults.Vector(0, 0)
self.x = self.y = 0
self.modes = {}
self.privateModes = {}
self.setPrivateModes([insults.privateModes.AUTO_WRAP,
insults.privateModes.CURSOR_MODE])
self.numericKeypad = 'app'
self.activeCharset = insults.G0
self.graphicRendition = {
'bold': False,
'underline': False,
'blink': False,
'reverseVideo': False,
'foreground': WHITE,
'background': BLACK}
self.charsets = {
insults.G0: insults.CS_US,
insults.G1: insults.CS_US,
insults.G2: insults.CS_ALTERNATE,
insults.G3: insults.CS_ALTERNATE_SPECIAL}
self.eraseDisplay()
def unhandledControlSequence(self, buf):
print 'Could not handle', repr(buf)
def __str__(self):
lines = []
for L in self.lines:
buf = []
length = 0
for (ch, attr) in L:
if ch is not self.void:
buf.append(ch)
length = len(buf)
else:
buf.append(self.fill)
lines.append(''.join(buf[:length]))
return '\n'.join(lines)
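# --- illustrative sketch, not part of the original module ---
# Driving the in-memory terminal directly: makeConnection() triggers
# reset(), then written bytes land in the 80x24 character grid.
def _demoTerminalBuffer():
    term = TerminalBuffer()
    term.makeConnection(None)
    term.write('hello\nworld')
    return str(term)  # first two rows read 'hello' and 'world'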
class ExpectationTimeout(Exception):
pass
class ExpectableBuffer(TerminalBuffer):
_mark = 0
def connectionMade(self):
TerminalBuffer.connectionMade(self)
self._expecting = []
def write(self, bytes):
TerminalBuffer.write(self, bytes)
self._checkExpected()
def cursorHome(self):
TerminalBuffer.cursorHome(self)
self._mark = 0
def _timeoutExpected(self, d):
d.errback(ExpectationTimeout())
self._checkExpected()
def _checkExpected(self):
s = str(self)[self._mark:]
while self._expecting:
expr, timer, deferred = self._expecting[0]
if timer and not timer.active():
del self._expecting[0]
continue
for match in expr.finditer(s):
if timer:
timer.cancel()
del self._expecting[0]
self._mark += match.end()
s = s[match.end():]
deferred.callback(match)
break
else:
return
def expect(self, expression, timeout=None, scheduler=reactor):
d = defer.Deferred()
timer = None
if timeout:
timer = scheduler.callLater(timeout, self._timeoutExpected, d)
self._expecting.append((re.compile(expression), timer, d))
self._checkExpected()
return d
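# --- illustrative sketch, not part of the original module ---
# expect() returns a Deferred that fires with the regex match as soon as
# enough bytes have been written.
def _demoExpect():
    buf = ExpectableBuffer()
    buf.makeConnection(None)
    d = buf.expect('wor.d')
    d.addCallback(lambda match: match.group(0))
    buf.write('hello world')  # fires the Deferred with the 'world' match
    return d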
__all__ = [
'CharacterAttribute', 'TerminalBuffer', 'ExpectableBuffer']
# (license: gpl-2.0)

# ============================================================
# File: tests/wpt/web-platform-tests/webdriver/tests/actions/mouse.py   (repo: thiagopnts/servo)
# ============================================================
import pytest
from tests.actions.support.mouse import get_center
from tests.actions.support.refine import get_events, filter_dict
from tests.support.asserts import assert_move_to_coordinates
from tests.support.inline import inline
from tests.support.wait import wait
def link_doc(dest):
content = "<a href=\"{}\" id=\"link\">destination</a>".format(dest)
return inline(content)
# TODO use pytest.approx once we upgrade to pytest > 3.0
def approx(n, m, tolerance=1):
return abs(n - m) <= tolerance
def test_click_at_coordinates(session, test_actions_page, mouse_chain):
div_point = {
"x": 82,
"y": 187,
}
mouse_chain \
.pointer_move(div_point["x"], div_point["y"], duration=1000) \
.click() \
.perform()
events = get_events(session)
assert len(events) == 4
assert_move_to_coordinates(div_point, "outer", events)
for e in events:
if e["type"] != "mousedown":
assert e["buttons"] == 0
assert e["button"] == 0
expected = [
{"type": "mousedown", "buttons": 1},
{"type": "mouseup", "buttons": 0},
{"type": "click", "buttons": 0},
]
filtered_events = [filter_dict(e, expected[0]) for e in events]
assert expected == filtered_events[1:]
def test_context_menu_at_coordinates(session, test_actions_page, mouse_chain):
div_point = {
"x": 82,
"y": 187,
}
mouse_chain \
.pointer_move(div_point["x"], div_point["y"]) \
.pointer_down(button=2) \
.pointer_up(button=2) \
.perform()
events = get_events(session)
expected = [
{"type": "mousedown", "button": 2},
{"type": "contextmenu", "button": 2},
]
assert len(events) == 4
filtered_events = [filter_dict(e, expected[0]) for e in events]
mousedown_contextmenu_events = [
x for x in filtered_events
if x["type"] in ["mousedown", "contextmenu"]
]
assert expected == mousedown_contextmenu_events
def test_click_element_center(session, test_actions_page, mouse_chain):
outer = session.find.css("#outer", all=False)
center = get_center(outer.rect)
mouse_chain.click(element=outer).perform()
events = get_events(session)
assert len(events) == 4
event_types = [e["type"] for e in events]
assert ["mousemove", "mousedown", "mouseup", "click"] == event_types
for e in events:
if e["type"] != "mousemove":
assert approx(e["pageX"], center["x"])
assert approx(e["pageY"], center["y"])
assert e["target"] == "outer"
def test_click_navigation(session, url, release_actions):
destination = url("/webdriver/tests/actions/support/test_actions_wdspec.html")
start = link_doc(destination)
def click(link):
mouse_chain = session.actions.sequence(
"pointer", "pointer_id", {"pointerType": "mouse"})
mouse_chain.click(element=link).perform()
session.url = start
error_message = "Did not navigate to %s" % destination
click(session.find.css("#link", all=False))
wait(session, lambda s: s.url == destination, error_message)
# repeat steps to check behaviour after document unload
session.url = start
click(session.find.css("#link", all=False))
wait(session, lambda s: s.url == destination, error_message)
@pytest.mark.parametrize("drag_duration", [0, 300, 800])
@pytest.mark.parametrize("dx, dy",
[(20, 0), (0, 15), (10, 15), (-20, 0), (10, -15), (-10, -15)])
def test_drag_and_drop(session,
test_actions_page,
mouse_chain,
dx,
dy,
drag_duration):
drag_target = session.find.css("#dragTarget", all=False)
initial_rect = drag_target.rect
initial_center = get_center(initial_rect)
# Conclude chain with extra move to allow time for last queued
# coordinate-update of drag_target and to test that drag_target is "dropped".
mouse_chain \
.pointer_move(0, 0, origin=drag_target) \
.pointer_down() \
.pointer_move(dx, dy, duration=drag_duration, origin="pointer") \
.pointer_up() \
.pointer_move(80, 50, duration=100, origin="pointer") \
.perform()
# mouseup that ends the drag is at the expected destination
e = get_events(session)[1]
assert e["type"] == "mouseup"
assert approx(e["pageX"], initial_center["x"] + dx)
assert approx(e["pageY"], initial_center["y"] + dy)
# check resulting location of the dragged element
final_rect = drag_target.rect
assert initial_rect["x"] + dx == final_rect["x"]
assert initial_rect["y"] + dy == final_rect["y"]
# (license: mpl-2.0)

# ============================================================
# File: pygments/styles/borland.py   (repo: johnny-bui/pygments-sablecc)
# ============================================================
# -*- coding: utf-8 -*-
"""
pygments.styles.borland
~~~~~~~~~~~~~~~~~~~~~~~
Style similar to the style used in the Borland IDEs.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class BorlandStyle(Style):
"""
Style similar to the style used in the Borland IDEs.
"""
default_style = ''
styles = {
Whitespace: '#bbbbbb',
Comment: 'italic #008800',
Comment.Preproc: 'noitalic #008080',
Comment.Special: 'noitalic bold',
String: '#0000FF',
String.Char: '#800080',
Number: '#0000FF',
Keyword: 'bold #000080',
Operator.Word: 'bold',
Name.Tag: 'bold #000080',
Name.Attribute: '#FF0000',
Generic.Heading: '#999999',
Generic.Subheading: '#aaaaaa',
Generic.Deleted: 'bg:#ffdddd #000000',
Generic.Inserted: 'bg:#ddffdd #000000',
Generic.Error: '#aa0000',
Generic.Emph: 'italic',
Generic.Strong: 'bold',
Generic.Prompt: '#555555',
Generic.Output: '#888888',
Generic.Traceback: '#aa0000',
Error: 'bg:#e3d2d2 #a61717'
}
# (license: bsd-2-clause)

# ============================================================
# File: addons/crm_partner_assign/report/crm_lead_report.py   (repo: rahuldhote/odoo)
# ============================================================
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
from openerp import tools
from openerp.addons.crm import crm
class crm_lead_report_assign(osv.osv):
""" CRM Lead Report """
_name = "crm.lead.report.assign"
_auto = False
_description = "CRM Lead Report"
_columns = {
'partner_assigned_id':fields.many2one('res.partner', 'Partner', readonly=True),
'grade_id':fields.many2one('res.partner.grade', 'Grade', readonly=True),
'user_id':fields.many2one('res.users', 'User', readonly=True),
'country_id':fields.many2one('res.country', 'Country', readonly=True),
'section_id':fields.many2one('crm.case.section', 'Sales Team', readonly=True),
'company_id': fields.many2one('res.company', 'Company', readonly=True),
'date_assign': fields.date('Assign Date', readonly=True),
'create_date': fields.datetime('Create Date', readonly=True),
'delay_open': fields.float('Delay to Assign',digits=(16,2),readonly=True, group_operator="avg",help="Number of Days to open the case"),
'delay_close': fields.float('Delay to Close',digits=(16,2),readonly=True, group_operator="avg",help="Number of Days to close the case"),
'delay_expected': fields.float('Overpassed Deadline',digits=(16,2),readonly=True, group_operator="avg"),
'probability': fields.float('Avg Probability',digits=(16,2),readonly=True, group_operator="avg"),
'probability_max': fields.float('Max Probability',digits=(16,2),readonly=True, group_operator="max"),
'planned_revenue': fields.float('Planned Revenue',digits=(16,2),readonly=True),
'probable_revenue': fields.float('Probable Revenue', digits=(16,2),readonly=True),
'stage_id': fields.many2one ('crm.case.stage', 'Stage', domain="[('section_ids', '=', section_id)]"),
'partner_id': fields.many2one('res.partner', 'Customer' , readonly=True),
'opening_date': fields.datetime('Opening Date', readonly=True),
'date_closed': fields.datetime('Close Date', readonly=True),
'nbr': fields.integer('# of Cases', readonly=True), # TDE FIXME master: rename into nbr_cases
'priority': fields.selection(crm.AVAILABLE_PRIORITIES, 'Priority'),
'type':fields.selection([
('lead','Lead'),
('opportunity','Opportunity')
],'Type', help="Type is used to separate Leads and Opportunities"),
}
def init(self, cr):
"""
CRM Lead Report
@param cr: the current row, from the database cursor
"""
tools.drop_view_if_exists(cr, 'crm_lead_report_assign')
cr.execute("""
CREATE OR REPLACE VIEW crm_lead_report_assign AS (
SELECT
c.id,
c.date_open as opening_date,
c.date_closed as date_closed,
c.date_assign,
c.user_id,
c.probability,
c.probability as probability_max,
c.stage_id,
c.type,
c.company_id,
c.priority,
c.section_id,
c.partner_id,
c.country_id,
c.planned_revenue,
c.partner_assigned_id,
p.grade_id,
p.date as partner_date,
c.planned_revenue*(c.probability/100) as probable_revenue,
1 as nbr,
c.create_date as create_date,
extract('epoch' from (c.write_date-c.create_date))/(3600*24) as delay_close,
extract('epoch' from (c.date_deadline - c.date_closed))/(3600*24) as delay_expected,
extract('epoch' from (c.date_open-c.create_date))/(3600*24) as delay_open
FROM
crm_lead c
left join res_partner p on (c.partner_assigned_id=p.id)
)""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# (license: agpl-3.0)

# ============================================================
# File: lib/ansible/modules/system/osx_defaults.py   (repo: fedorpatlin/ansible)
# ============================================================
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, GeekChimp - Franck Nijhof <franck@geekchimp.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: osx_defaults
author: Franck Nijhof (@frenck)
short_description: osx_defaults allows users to read, write, and delete Mac OS X user defaults from Ansible
description:
- osx_defaults allows users to read, write, and delete Mac OS X user defaults from Ansible scripts.
Mac OS X applications and other programs use the defaults system to record user preferences and other
information that must be maintained when the applications aren't running (such as default font for new
documents, or the position of an Info panel).
version_added: "2.0"
options:
domain:
description:
- The domain is a domain name of the form com.companyname.appname.
required: false
default: NSGlobalDomain
host:
description:
- The host on which the preference should apply. The special value "currentHost" corresponds to the
"-currentHost" switch of the defaults commandline tool.
required: false
default: null
version_added: "2.1"
key:
description:
- The key of the user preference
required: true
type:
description:
- The type of value to write.
required: false
default: string
choices: [ "array", "bool", "boolean", "date", "float", "int", "integer", "string" ]
array_add:
description:
- Add new elements to the array for a key which has an array as its value.
required: false
default: false
choices: [ "true", "false" ]
value:
description:
- The value to write. Only required when state = present.
required: false
default: null
state:
description:
- The state of the user defaults
required: false
default: present
choices: [ "present", "absent" ]
notes:
- Apple Mac caches defaults. You may need to log out and log back in to apply the changes.
'''
EXAMPLES = '''
- osx_defaults:
domain: com.apple.Safari
key: IncludeInternalDebugMenu
type: bool
value: true
state: present
- osx_defaults:
domain: NSGlobalDomain
key: AppleMeasurementUnits
type: string
value: Centimeters
state: present
- osx_defaults:
domain: com.apple.screensaver
host: currentHost
key: showClock
type: int
value: 1
- osx_defaults:
key: AppleMeasurementUnits
type: string
value: Centimeters
- osx_defaults:
key: AppleLanguages
type: array
value:
- en
- nl
- osx_defaults:
domain: com.geekchimp.macable
key: ExampleKeyToRemove
state: absent
'''
import datetime
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
# exceptions --------------------------------------------------------------- {{{
class OSXDefaultsException(Exception):
pass
# /exceptions -------------------------------------------------------------- }}}
# class MacDefaults -------------------------------------------------------- {{{
class OSXDefaults(object):
""" Class to manage Mac OS user defaults """
# init ---------------------------------------------------------------- {{{
""" Initialize this module. Finds 'defaults' executable and preps the parameters """
def __init__(self, **kwargs):
# Initial var for storing current defaults value
self.current_value = None
# Just set all given parameters
for key, val in kwargs.items():
setattr(self, key, val)
# Try to find the defaults executable
self.executable = self.module.get_bin_path(
'defaults',
required=False,
opt_dirs=self.path.split(':'),
)
if not self.executable:
raise OSXDefaultsException("Unable to locate defaults executable.")
# When state is present, we require a parameter
if self.state == "present" and self.value is None:
raise OSXDefaultsException("Missing value parameter")
# Ensure the value is the correct type
self.value = self._convert_type(self.type, self.value)
# /init --------------------------------------------------------------- }}}
# tools --------------------------------------------------------------- {{{
""" Converts value to given type """
def _convert_type(self, type, value):
if type == "string":
return str(value)
elif type in ["bool", "boolean"]:
if isinstance(value, basestring):
value = value.lower()
if value in [True, 1, "true", "1", "yes"]:
return True
elif value in [False, 0, "false", "0", "no"]:
return False
raise OSXDefaultsException("Invalid boolean value: {0}".format(repr(value)))
elif type == "date":
try:
return datetime.datetime.strptime(value.split("+")[0].strip(), "%Y-%m-%d %H:%M:%S")
except ValueError:
raise OSXDefaultsException(
"Invalid date value: {0}. Required format yyy-mm-dd hh:mm:ss.".format(repr(value))
)
elif type in ["int", "integer"]:
if not str(value).isdigit():
raise OSXDefaultsException("Invalid integer value: {0}".format(repr(value)))
return int(value)
elif type == "float":
try:
value = float(value)
except ValueError:
raise OSXDefaultsException("Invalid float value: {0}".format(repr(value)))
return value
elif type == "array":
if not isinstance(value, list):
raise OSXDefaultsException("Invalid value. Expected value to be an array")
return value
raise OSXDefaultsException('Type is not supported: {0}'.format(type))
""" Returns a normalized list of commandline arguments based on the "host" attribute """
def _host_args(self):
if self.host is None:
return []
elif self.host == 'currentHost':
return ['-currentHost']
else:
return ['-host', self.host]
""" Returns a list containing the "defaults" executable and any common base arguments """
def _base_command(self):
return [self.executable] + self._host_args()
""" Converts array output from defaults to an list """
@staticmethod
def _convert_defaults_str_to_list(value):
# Split output of defaults. Every line contains a value
value = value.splitlines()
# Remove first and last item, those are not actual values
value.pop(0)
value.pop(-1)
# Remove extra spaces and comma (,) at the end of values
value = [re.sub(',$', '', x.strip(' ')) for x in value]
return value
# /tools -------------------------------------------------------------- }}}
# commands ------------------------------------------------------------ {{{
""" Reads value of this domain & key from defaults """
def read(self):
# First try to find out the type
rc, out, err = self.module.run_command(self._base_command() + ["read-type", self.domain, self.key])
# If RC is 1, the key does not exists
if rc == 1:
return None
# If the RC is not 0, then terrible happened! Ooooh nooo!
if rc != 0:
raise OSXDefaultsException("An error occurred while reading key type from defaults: " + out)
# Ok, lets parse the type from output
type = out.strip().replace('Type is ', '')
# Now get the current value
rc, out, err = self.module.run_command(self._base_command() + ["read", self.domain, self.key])
# Strip output
out = out.strip()
# An non zero RC at this point is kinda strange...
if rc != 0:
raise OSXDefaultsException("An error occurred while reading key value from defaults: " + out)
# Convert string to list when type is array
if type == "array":
out = self._convert_defaults_str_to_list(out)
# Store the current_value
self.current_value = self._convert_type(type, out)
""" Writes value to this domain & key to defaults """
def write(self):
# We need to convert some values so the defaults commandline understands it
if isinstance(self.value, bool):
if self.value:
value = "TRUE"
else:
value = "FALSE"
elif isinstance(self.value, (int, float)):
value = str(self.value)
elif self.array_add and self.current_value is not None:
value = list(set(self.value) - set(self.current_value))
elif isinstance(self.value, datetime.datetime):
value = self.value.strftime('%Y-%m-%d %H:%M:%S')
else:
value = self.value
# When the type is array and array_add is enabled, morph the type :)
if self.type == "array" and self.array_add:
self.type = "array-add"
# All values should be a list, for easy passing it to the command
if not isinstance(value, list):
value = [value]
rc, out, err = self.module.run_command(self._base_command() + ['write', self.domain, self.key, '-' + self.type] + value)
if rc != 0:
raise OSXDefaultsException('An error occurred while writing value to defaults: ' + out)
""" Deletes defaults key from domain """
def delete(self):
rc, out, err = self.module.run_command(self._base_command() + ['delete', self.domain, self.key])
if rc != 0:
raise OSXDefaultsException("An error occurred while deleting key from defaults: " + out)
# /commands ----------------------------------------------------------- }}}
# run ----------------------------------------------------------------- {{{
""" Does the magic! :) """
def run(self):
# Get the current value from defaults
self.read()
# Handle absent state
if self.state == "absent":
if self.current_value is None:
return False
if self.module.check_mode:
return True
self.delete()
return True
# There is a type mismatch! Given type does not match the type in defaults
value_type = type(self.value)
if self.current_value is not None and not isinstance(self.current_value, value_type):
raise OSXDefaultsException("Type mismatch. Type in defaults: " + type(self.current_value).__name__)
# Current value matches the given value. Nothing need to be done. Arrays need extra care
if self.type == "array" and self.current_value is not None and not self.array_add and \
set(self.current_value) == set(self.value):
return False
elif self.type == "array" and self.current_value is not None and self.array_add and \
len(list(set(self.value) - set(self.current_value))) == 0:
return False
elif self.current_value == self.value:
return False
if self.module.check_mode:
return True
# Change/Create/Set given key/value for domain in defaults
self.write()
return True
# /run ---------------------------------------------------------------- }}}
# /class MacDefaults ------------------------------------------------------ }}}
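# --- illustrative sketch, not part of the original module ---
# Two of the helpers above can be exercised without a live AnsibleModule:
# the array parser is a staticmethod, and the boolean coercion rules of
# _convert_type() are mirrored here as a hypothetical stand-alone helper.
def _demo_defaults_array_parsing():
    raw = '(\n    en,\n    nl\n)'
    return OSXDefaults._convert_defaults_str_to_list(raw)  # ['en', 'nl']

def _demo_bool_coercion(value):
    if isinstance(value, basestring):
        value = value.lower()
    if value in [True, 1, "true", "1", "yes"]:
        return True
    if value in [False, 0, "false", "0", "no"]:
        return False
    raise OSXDefaultsException("Invalid boolean value: {0}".format(repr(value)))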
# main -------------------------------------------------------------------- {{{
def main():
module = AnsibleModule(
argument_spec=dict(
domain=dict(
default="NSGlobalDomain",
required=False,
),
host=dict(
default=None,
required=False,
),
key=dict(
default=None,
),
type=dict(
default="string",
required=False,
choices=[
"array",
"bool",
"boolean",
"date",
"float",
"int",
"integer",
"string",
],
),
array_add=dict(
default=False,
required=False,
type='bool',
),
value=dict(
default=None,
required=False,
type='raw'
),
state=dict(
default="present",
required=False,
choices=[
"absent", "present"
],
),
path=dict(
default="/usr/bin:/usr/local/bin",
required=False,
)
),
supports_check_mode=True,
)
domain = module.params['domain']
host = module.params['host']
key = module.params['key']
type = module.params['type']
array_add = module.params['array_add']
value = module.params['value']
state = module.params['state']
path = module.params['path']
try:
defaults = OSXDefaults(module=module, domain=domain, host=host, key=key, type=type,
array_add=array_add, value=value, state=state, path=path)
changed = defaults.run()
module.exit_json(changed=changed)
except OSXDefaultsException:
e = get_exception()
module.fail_json(msg=e.message)
# /main ------------------------------------------------------------------- }}}
if __name__ == '__main__':
main()
# (license: gpl-3.0)

# ============================================================
# File: lib/jinja2/testsuite/filters.py   (repo: codeworldprodigy/lab4)
# ============================================================
# -*- coding: utf-8 -*-
"""
jinja2.testsuite.filters
~~~~~~~~~~~~~~~~~~~~~~~~
Tests for the jinja filters.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Markup, Environment
from jinja2._compat import text_type, implements_to_string
env = Environment()
class FilterTestCase(JinjaTestCase):
def test_filter_calling(self):
rv = env.call_filter('sum', [1, 2, 3])
self.assert_equal(rv, 6)
def test_capitalize(self):
tmpl = env.from_string('{{ "foo bar"|capitalize }}')
assert tmpl.render() == 'Foo bar'
def test_center(self):
tmpl = env.from_string('{{ "foo"|center(9) }}')
assert tmpl.render() == ' foo '
def test_default(self):
tmpl = env.from_string(
"{{ missing|default('no') }}|{{ false|default('no') }}|"
"{{ false|default('no', true) }}|{{ given|default('no') }}"
)
assert tmpl.render(given='yes') == 'no|False|no|yes'
def test_dictsort(self):
tmpl = env.from_string(
'{{ foo|dictsort }}|'
'{{ foo|dictsort(true) }}|'
'{{ foo|dictsort(false, "value") }}'
)
out = tmpl.render(foo={"aa": 0, "b": 1, "c": 2, "AB": 3})
assert out == ("[('aa', 0), ('AB', 3), ('b', 1), ('c', 2)]|"
"[('AB', 3), ('aa', 0), ('b', 1), ('c', 2)]|"
"[('aa', 0), ('b', 1), ('c', 2), ('AB', 3)]")
def test_batch(self):
tmpl = env.from_string("{{ foo|batch(3)|list }}|"
"{{ foo|batch(3, 'X')|list }}")
out = tmpl.render(foo=list(range(10)))
assert out == ("[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]|"
"[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 'X', 'X']]")
def test_slice(self):
tmpl = env.from_string('{{ foo|slice(3)|list }}|'
'{{ foo|slice(3, "X")|list }}')
out = tmpl.render(foo=list(range(10)))
assert out == ("[[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]|"
"[[0, 1, 2, 3], [4, 5, 6, 'X'], [7, 8, 9, 'X']]")
def test_escape(self):
tmpl = env.from_string('''{{ '<">&'|escape }}''')
out = tmpl.render()
assert out == '&lt;&#34;&gt;&amp;'
def test_striptags(self):
tmpl = env.from_string('''{{ foo|striptags }}''')
out = tmpl.render(foo=' <p>just a small \n <a href="#">'
'example</a> link</p>\n<p>to a webpage</p> '
'<!-- <p>and some commented stuff</p> -->')
assert out == 'just a small example link to a webpage'
def test_filesizeformat(self):
tmpl = env.from_string(
'{{ 100|filesizeformat }}|'
'{{ 1000|filesizeformat }}|'
'{{ 1000000|filesizeformat }}|'
'{{ 1000000000|filesizeformat }}|'
'{{ 1000000000000|filesizeformat }}|'
'{{ 100|filesizeformat(true) }}|'
'{{ 1000|filesizeformat(true) }}|'
'{{ 1000000|filesizeformat(true) }}|'
'{{ 1000000000|filesizeformat(true) }}|'
'{{ 1000000000000|filesizeformat(true) }}'
)
out = tmpl.render()
self.assert_equal(out, (
'100 Bytes|1.0 kB|1.0 MB|1.0 GB|1.0 TB|100 Bytes|'
'1000 Bytes|976.6 KiB|953.7 MiB|931.3 GiB'
))
def test_filesizeformat_issue59(self):
tmpl = env.from_string(
'{{ 300|filesizeformat }}|'
'{{ 3000|filesizeformat }}|'
'{{ 3000000|filesizeformat }}|'
'{{ 3000000000|filesizeformat }}|'
'{{ 3000000000000|filesizeformat }}|'
'{{ 300|filesizeformat(true) }}|'
'{{ 3000|filesizeformat(true) }}|'
'{{ 3000000|filesizeformat(true) }}'
)
out = tmpl.render()
self.assert_equal(out, (
'300 Bytes|3.0 kB|3.0 MB|3.0 GB|3.0 TB|300 Bytes|'
'2.9 KiB|2.9 MiB'
))
def test_first(self):
tmpl = env.from_string('{{ foo|first }}')
out = tmpl.render(foo=list(range(10)))
assert out == '0'
def test_float(self):
tmpl = env.from_string('{{ "42"|float }}|'
'{{ "ajsghasjgd"|float }}|'
'{{ "32.32"|float }}')
out = tmpl.render()
assert out == '42.0|0.0|32.32'
def test_format(self):
tmpl = env.from_string('''{{ "%s|%s"|format("a", "b") }}''')
out = tmpl.render()
assert out == 'a|b'
def test_indent(self):
tmpl = env.from_string('{{ foo|indent(2) }}|{{ foo|indent(2, true) }}')
text = '\n'.join([' '.join(['foo', 'bar'] * 2)] * 2)
out = tmpl.render(foo=text)
assert out == ('foo bar foo bar\n foo bar foo bar| '
'foo bar foo bar\n foo bar foo bar')
def test_int(self):
tmpl = env.from_string('{{ "42"|int }}|{{ "ajsghasjgd"|int }}|'
'{{ "32.32"|int }}')
out = tmpl.render()
assert out == '42|0|32'
def test_join(self):
tmpl = env.from_string('{{ [1, 2, 3]|join("|") }}')
out = tmpl.render()
assert out == '1|2|3'
env2 = Environment(autoescape=True)
tmpl = env2.from_string('{{ ["<foo>", "<span>foo</span>"|safe]|join }}')
assert tmpl.render() == '&lt;foo&gt;<span>foo</span>'
def test_join_attribute(self):
class User(object):
def __init__(self, username):
self.username = username
tmpl = env.from_string('''{{ users|join(', ', 'username') }}''')
assert tmpl.render(users=map(User, ['foo', 'bar'])) == 'foo, bar'
def test_last(self):
tmpl = env.from_string('''{{ foo|last }}''')
out = tmpl.render(foo=list(range(10)))
assert out == '9'
def test_length(self):
tmpl = env.from_string('''{{ "hello world"|length }}''')
out = tmpl.render()
assert out == '11'
def test_lower(self):
tmpl = env.from_string('''{{ "FOO"|lower }}''')
out = tmpl.render()
assert out == 'foo'
def test_pprint(self):
from pprint import pformat
tmpl = env.from_string('''{{ data|pprint }}''')
data = list(range(1000))
assert tmpl.render(data=data) == pformat(data)
def test_random(self):
tmpl = env.from_string('''{{ seq|random }}''')
seq = list(range(100))
for _ in range(10):
assert int(tmpl.render(seq=seq)) in seq
def test_reverse(self):
tmpl = env.from_string('{{ "foobar"|reverse|join }}|'
'{{ [1, 2, 3]|reverse|list }}')
assert tmpl.render() == 'raboof|[3, 2, 1]'
def test_string(self):
x = [1, 2, 3, 4, 5]
tmpl = env.from_string('''{{ obj|string }}''')
assert tmpl.render(obj=x) == text_type(x)
def test_title(self):
tmpl = env.from_string('''{{ "foo bar"|title }}''')
assert tmpl.render() == "Foo Bar"
tmpl = env.from_string('''{{ "foo's bar"|title }}''')
assert tmpl.render() == "Foo's Bar"
tmpl = env.from_string('''{{ "foo bar"|title }}''')
assert tmpl.render() == "Foo Bar"
tmpl = env.from_string('''{{ "f bar f"|title }}''')
assert tmpl.render() == "F Bar F"
tmpl = env.from_string('''{{ "foo-bar"|title }}''')
assert tmpl.render() == "Foo-Bar"
tmpl = env.from_string('''{{ "foo\tbar"|title }}''')
assert tmpl.render() == "Foo\tBar"
tmpl = env.from_string('''{{ "FOO\tBAR"|title }}''')
assert tmpl.render() == "Foo\tBar"
def test_truncate(self):
tmpl = env.from_string(
'{{ data|truncate(15, true, ">>>") }}|'
'{{ data|truncate(15, false, ">>>") }}|'
'{{ smalldata|truncate(15) }}'
)
out = tmpl.render(data='foobar baz bar' * 1000,
smalldata='foobar baz bar')
assert out == 'foobar baz barf>>>|foobar baz >>>|foobar baz bar'
def test_upper(self):
tmpl = env.from_string('{{ "foo"|upper }}')
assert tmpl.render() == 'FOO'
def test_urlize(self):
tmpl = env.from_string('{{ "foo http://www.example.com/ bar"|urlize }}')
assert tmpl.render() == 'foo <a href="http://www.example.com/">'\
'http://www.example.com/</a> bar'
def test_wordcount(self):
tmpl = env.from_string('{{ "foo bar baz"|wordcount }}')
assert tmpl.render() == '3'
def test_block(self):
tmpl = env.from_string('{% filter lower|escape %}<HEHE>{% endfilter %}')
assert tmpl.render() == '&lt;hehe&gt;'
def test_chaining(self):
tmpl = env.from_string('''{{ ['<foo>', '<bar>']|first|upper|escape }}''')
assert tmpl.render() == '&lt;FOO&gt;'
def test_sum(self):
tmpl = env.from_string('''{{ [1, 2, 3, 4, 5, 6]|sum }}''')
assert tmpl.render() == '21'
def test_sum_attributes(self):
tmpl = env.from_string('''{{ values|sum('value') }}''')
assert tmpl.render(values=[
{'value': 23},
{'value': 1},
{'value': 18},
]) == '42'
def test_sum_attributes_nested(self):
tmpl = env.from_string('''{{ values|sum('real.value') }}''')
assert tmpl.render(values=[
{'real': {'value': 23}},
{'real': {'value': 1}},
{'real': {'value': 18}},
]) == '42'
def test_sum_attributes_tuple(self):
tmpl = env.from_string('''{{ values.items()|sum('1') }}''')
assert tmpl.render(values={
'foo': 23,
'bar': 1,
'baz': 18,
}) == '42'
def test_abs(self):
tmpl = env.from_string('''{{ -1|abs }}|{{ 1|abs }}''')
assert tmpl.render() == '1|1', tmpl.render()
def test_round_positive(self):
tmpl = env.from_string('{{ 2.7|round }}|{{ 2.1|round }}|'
"{{ 2.1234|round(3, 'floor') }}|"
"{{ 2.1|round(0, 'ceil') }}")
assert tmpl.render() == '3.0|2.0|2.123|3.0', tmpl.render()
def test_round_negative(self):
tmpl = env.from_string('{{ 21.3|round(-1)}}|'
"{{ 21.3|round(-1, 'ceil')}}|"
"{{ 21.3|round(-1, 'floor')}}")
assert tmpl.render() == '20.0|30.0|20.0',tmpl.render()
def test_xmlattr(self):
tmpl = env.from_string("{{ {'foo': 42, 'bar': 23, 'fish': none, "
"'spam': missing, 'blub:blub': '<?>'}|xmlattr }}")
out = tmpl.render().split()
assert len(out) == 3
assert 'foo="42"' in out
assert 'bar="23"' in out
assert 'blub:blub="&lt;?&gt;"' in out
def test_sort1(self):
tmpl = env.from_string('{{ [2, 3, 1]|sort }}|{{ [2, 3, 1]|sort(true) }}')
assert tmpl.render() == '[1, 2, 3]|[3, 2, 1]'
def test_sort2(self):
tmpl = env.from_string('{{ "".join(["c", "A", "b", "D"]|sort) }}')
assert tmpl.render() == 'AbcD'
def test_sort3(self):
tmpl = env.from_string('''{{ ['foo', 'Bar', 'blah']|sort }}''')
assert tmpl.render() == "['Bar', 'blah', 'foo']"
def test_sort4(self):
@implements_to_string
class Magic(object):
def __init__(self, value):
self.value = value
def __str__(self):
return text_type(self.value)
tmpl = env.from_string('''{{ items|sort(attribute='value')|join }}''')
assert tmpl.render(items=map(Magic, [3, 2, 4, 1])) == '1234'
def test_groupby(self):
tmpl = env.from_string('''
{%- for grouper, list in [{'foo': 1, 'bar': 2},
{'foo': 2, 'bar': 3},
{'foo': 1, 'bar': 1},
{'foo': 3, 'bar': 4}]|groupby('foo') -%}
{{ grouper }}{% for x in list %}: {{ x.foo }}, {{ x.bar }}{% endfor %}|
{%- endfor %}''')
assert tmpl.render().split('|') == [
"1: 1, 2: 1, 1",
"2: 2, 3",
"3: 3, 4",
""
]
def test_groupby_tuple_index(self):
tmpl = env.from_string('''
{%- for grouper, list in [('a', 1), ('a', 2), ('b', 1)]|groupby(0) -%}
{{ grouper }}{% for x in list %}:{{ x.1 }}{% endfor %}|
{%- endfor %}''')
assert tmpl.render() == 'a:1:2|b:1|'
def test_groupby_multidot(self):
class Date(object):
def __init__(self, day, month, year):
self.day = day
self.month = month
self.year = year
class Article(object):
def __init__(self, title, *date):
self.date = Date(*date)
self.title = title
articles = [
Article('aha', 1, 1, 1970),
Article('interesting', 2, 1, 1970),
Article('really?', 3, 1, 1970),
Article('totally not', 1, 1, 1971)
]
tmpl = env.from_string('''
{%- for year, list in articles|groupby('date.year') -%}
{{ year }}{% for x in list %}[{{ x.title }}]{% endfor %}|
{%- endfor %}''')
assert tmpl.render(articles=articles).split('|') == [
'1970[aha][interesting][really?]',
'1971[totally not]',
''
]
def test_filtertag(self):
tmpl = env.from_string("{% filter upper|replace('FOO', 'foo') %}"
"foobar{% endfilter %}")
assert tmpl.render() == 'fooBAR'
def test_replace(self):
env = Environment()
tmpl = env.from_string('{{ string|replace("o", 42) }}')
assert tmpl.render(string='<foo>') == '<f4242>'
env = Environment(autoescape=True)
tmpl = env.from_string('{{ string|replace("o", 42) }}')
assert tmpl.render(string='<foo>') == '<f4242>'
tmpl = env.from_string('{{ string|replace("<", 42) }}')
assert tmpl.render(string='<foo>') == '42foo>'
tmpl = env.from_string('{{ string|replace("o", ">x<") }}')
assert tmpl.render(string=Markup('foo')) == 'f>x<>x<'
def test_forceescape(self):
tmpl = env.from_string('{{ x|forceescape }}')
assert tmpl.render(x=Markup('<div />')) == u'<div />'
def test_safe(self):
env = Environment(autoescape=True)
tmpl = env.from_string('{{ "<div>foo</div>"|safe }}')
assert tmpl.render() == '<div>foo</div>'
tmpl = env.from_string('{{ "<div>foo</div>" }}')
assert tmpl.render() == '<div>foo</div>'
def test_urlencode(self):
env = Environment(autoescape=True)
tmpl = env.from_string('{{ "Hello, world!"|urlencode }}')
assert tmpl.render() == 'Hello%2C%20world%21'
tmpl = env.from_string('{{ o|urlencode }}')
assert tmpl.render(o=u"Hello, world\u203d") == "Hello%2C%20world%E2%80%BD"
assert tmpl.render(o=(("f", 1),)) == "f=1"
assert tmpl.render(o=(('f', 1), ("z", 2))) == "f=1&z=2"
assert tmpl.render(o=((u"\u203d", 1),)) == "%E2%80%BD=1"
assert tmpl.render(o={u"\u203d": 1}) == "%E2%80%BD=1"
assert tmpl.render(o={0: 1}) == "0=1"
def test_simple_map(self):
env = Environment()
tmpl = env.from_string('{{ ["1", "2", "3"]|map("int")|sum }}')
self.assertEqual(tmpl.render(), '6')
def test_attribute_map(self):
class User(object):
def __init__(self, name):
self.name = name
env = Environment()
users = [
User('john'),
User('jane'),
User('mike'),
]
tmpl = env.from_string('{{ users|map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'john|jane|mike')
def test_empty_map(self):
env = Environment()
tmpl = env.from_string('{{ none|map("upper")|list }}')
self.assertEqual(tmpl.render(), '[]')
def test_simple_select(self):
env = Environment()
tmpl = env.from_string('{{ [1, 2, 3, 4, 5]|select("odd")|join("|") }}')
self.assertEqual(tmpl.render(), '1|3|5')
def test_bool_select(self):
env = Environment()
tmpl = env.from_string('{{ [none, false, 0, 1, 2, 3, 4, 5]|select|join("|") }}')
self.assertEqual(tmpl.render(), '1|2|3|4|5')
def test_simple_reject(self):
env = Environment()
tmpl = env.from_string('{{ [1, 2, 3, 4, 5]|reject("odd")|join("|") }}')
self.assertEqual(tmpl.render(), '2|4')
def test_bool_reject(self):
env = Environment()
tmpl = env.from_string('{{ [none, false, 0, 1, 2, 3, 4, 5]|reject|join("|") }}')
self.assertEqual(tmpl.render(), 'None|False|0')
def test_simple_select_attr(self):
class User(object):
def __init__(self, name, is_active):
self.name = name
self.is_active = is_active
env = Environment()
users = [
User('john', True),
User('jane', True),
User('mike', False),
]
tmpl = env.from_string('{{ users|selectattr("is_active")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'john|jane')
def test_simple_reject_attr(self):
class User(object):
def __init__(self, name, is_active):
self.name = name
self.is_active = is_active
env = Environment()
users = [
User('john', True),
User('jane', True),
User('mike', False),
]
tmpl = env.from_string('{{ users|rejectattr("is_active")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'mike')
def test_func_select_attr(self):
class User(object):
def __init__(self, id, name):
self.id = id
self.name = name
env = Environment()
users = [
User(1, 'john'),
User(2, 'jane'),
User(3, 'mike'),
]
tmpl = env.from_string('{{ users|selectattr("id", "odd")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'john|mike')
def test_func_reject_attr(self):
class User(object):
def __init__(self, id, name):
self.id = id
self.name = name
env = Environment()
users = [
User(1, 'john'),
User(2, 'jane'),
User(3, 'mike'),
]
tmpl = env.from_string('{{ users|rejectattr("id", "odd")|'
'map(attribute="name")|join("|") }}')
self.assertEqual(tmpl.render(users=users), 'jane')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(FilterTestCase))
return suite
| apache-2.0 | 7,650,749,180,376,782,000 | -3,462,294,102,947,084,300 | 36.221359 | 88 | 0.474151 | false |
chriscrosscutler/scikit-image | skimage/data/tests/test_data.py | 21 | 1824 | import numpy as np
import skimage.data as data
from numpy.testing import assert_equal, assert_almost_equal
def test_lena():
""" Test that "Lena" image can be loaded. """
lena = data.lena()
assert_equal(lena.shape, (512, 512, 3))
def test_astronaut():
""" Test that "astronaut" image can be loaded. """
astronaut = data.astronaut()
assert_equal(astronaut.shape, (512, 512, 3))
def test_camera():
""" Test that "camera" image can be loaded. """
cameraman = data.camera()
assert_equal(cameraman.ndim, 2)
def test_checkerboard():
""" Test that "checkerboard" image can be loaded. """
data.checkerboard()
def test_text():
""" Test that "text" image can be loaded. """
data.text()
def test_moon():
""" Test that "moon" image can be loaded. """
data.moon()
def test_page():
""" Test that "page" image can be loaded. """
data.page()
def test_clock():
""" Test that "clock" image can be loaded. """
data.clock()
def test_chelsea():
""" Test that "chelsea" image can be loaded. """
data.chelsea()
def test_coffee():
""" Test that "coffee" image can be loaded. """
data.coffee()
def test_binary_blobs():
blobs = data.binary_blobs(length=128)
assert_almost_equal(blobs.mean(), 0.5, decimal=1)
blobs = data.binary_blobs(length=128, volume_fraction=0.25)
assert_almost_equal(blobs.mean(), 0.25, decimal=1)
blobs = data.binary_blobs(length=32, volume_fraction=0.25, n_dim=3)
assert_almost_equal(blobs.mean(), 0.25, decimal=1)
other_realization = data.binary_blobs(length=32, volume_fraction=0.25,
n_dim=3)
assert not np.all(blobs == other_realization)
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()
| bsd-3-clause | -8,220,109,135,333,163,000 | 6,583,343,208,511,384,000 | 23.986301 | 74 | 0.622807 | false |
rajul/mne-python | mne/time_frequency/stft.py | 24 | 6497 | from math import ceil
import numpy as np
from scipy.fftpack import fft, ifft, fftfreq
from ..utils import logger, verbose
@verbose
def stft(x, wsize, tstep=None, verbose=None):
"""STFT Short-Term Fourier Transform using a sine window.
The transformation is designed to be a tight frame that can be
perfectly inverted. It only returns the positive frequencies.
Parameters
----------
x : 2d array of size n_signals x T
        containing the multi-channel signal
wsize : int
length of the STFT window in samples (must be a multiple of 4)
tstep : int
step between successive windows in samples (must be a multiple of 2,
a divider of wsize and smaller than wsize/2) (default: wsize/2)
verbose : bool, str, int, or None
If not None, override default verbose level (see mne.verbose).
Returns
-------
X : 3d array of shape [n_signals, wsize / 2 + 1, n_step]
STFT coefficients for positive frequencies with
n_step = ceil(T / tstep)
Examples
--------
X = stft(x, wsize)
X = stft(x, wsize, tstep)
See Also
--------
istft
stftfreq
"""
if not np.isrealobj(x):
raise ValueError("x is not a real valued array")
if x.ndim == 1:
x = x[None, :]
n_signals, T = x.shape
wsize = int(wsize)
# Errors and warnings
if wsize % 4:
raise ValueError('The window length must be a multiple of 4.')
if tstep is None:
tstep = wsize / 2
tstep = int(tstep)
if (wsize % tstep) or (tstep % 2):
raise ValueError('The step size must be a multiple of 2 and a '
'divider of the window length.')
if tstep > wsize / 2:
raise ValueError('The step size must be smaller than half the '
'window length.')
n_step = int(ceil(T / float(tstep)))
n_freq = wsize // 2 + 1
logger.info("Number of frequencies: %d" % n_freq)
logger.info("Number of time steps: %d" % n_step)
    X = np.zeros((n_signals, n_freq, n_step), dtype=complex)
if n_signals == 0:
return X
# Defining sine window
win = np.sin(np.arange(.5, wsize + .5) / wsize * np.pi)
win2 = win ** 2
swin = np.zeros((n_step - 1) * tstep + wsize)
for t in range(n_step):
swin[t * tstep:t * tstep + wsize] += win2
swin = np.sqrt(wsize * swin)
# Zero-padding and Pre-processing for edges
xp = np.zeros((n_signals, wsize + (n_step - 1) * tstep),
dtype=x.dtype)
xp[:, (wsize - tstep) // 2: (wsize - tstep) // 2 + T] = x
x = xp
for t in range(n_step):
# Framing
wwin = win / swin[t * tstep: t * tstep + wsize]
frame = x[:, t * tstep: t * tstep + wsize] * wwin[None, :]
# FFT
fframe = fft(frame)
X[:, :, t] = fframe[:, :n_freq]
return X
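# Editor's note: a minimal usage sketch, not part of the original module; the
# signal shape and window size below are illustrative assumptions.
#
#     >>> x = np.random.randn(2, 1000)   # 2 channels, 1000 samples
#     >>> X = stft(x, wsize=128)         # default tstep = wsize // 2 = 64
#     >>> X.shape                        # (n_signals, wsize // 2 + 1, ceil(T / tstep))
#     (2, 65, 16)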
def istft(X, tstep=None, Tx=None):
"""ISTFT Inverse Short-Term Fourier Transform using a sine window
Parameters
----------
X : 3d array of shape [n_signals, wsize / 2 + 1, n_step]
The STFT coefficients for positive frequencies
tstep : int
step between successive windows in samples (must be a multiple of 2,
a divider of wsize and smaller than wsize/2) (default: wsize/2)
Tx : int
Length of returned signal. If None Tx = n_step * tstep
Returns
-------
x : 1d array of length Tx
vector containing the inverse STFT signal
Examples
--------
x = istft(X)
x = istft(X, tstep)
See Also
--------
stft
"""
# Errors and warnings
n_signals, n_win, n_step = X.shape
    if n_win % 2 == 0:
        raise ValueError('The number of rows of the STFT matrix must be odd.')
wsize = 2 * (n_win - 1)
if tstep is None:
        tstep = wsize // 2
if wsize % tstep:
raise ValueError('The step size must be a divider of two times the '
'number of rows of the STFT matrix minus two.')
    if tstep % 2:
raise ValueError('The step size must be a multiple of 2.')
if tstep > wsize / 2:
raise ValueError('The step size must be smaller than the number of '
'rows of the STFT matrix minus one.')
if Tx is None:
Tx = n_step * tstep
T = n_step * tstep
    x = np.zeros((n_signals, T + wsize - tstep), dtype=float)
if n_signals == 0:
return x[:, :Tx]
# Defining sine window
win = np.sin(np.arange(.5, wsize + .5) / wsize * np.pi)
# win = win / norm(win);
# Pre-processing for edges
    swin = np.zeros(T + wsize - tstep, dtype=float)
for t in range(n_step):
swin[t * tstep:t * tstep + wsize] += win ** 2
swin = np.sqrt(swin / wsize)
fframe = np.empty((n_signals, n_win + wsize // 2 - 1), dtype=X.dtype)
for t in range(n_step):
# IFFT
fframe[:, :n_win] = X[:, :, t]
fframe[:, n_win:] = np.conj(X[:, wsize // 2 - 1: 0: -1, t])
frame = ifft(fframe)
wwin = win / swin[t * tstep:t * tstep + wsize]
# Overlap-add
x[:, t * tstep: t * tstep + wsize] += np.real(np.conj(frame) * wwin)
# Truncation
x = x[:, (wsize - tstep) // 2: (wsize - tstep) // 2 + T + 1][:, :Tx].copy()
return x
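# Editor's note: hedged round-trip check, reusing the illustrative X and x from
# the stft() sketch above; the sine window forms a tight frame, so inversion is
# exact up to floating-point error.
#
#     >>> x_rec = istft(X, tstep=64, Tx=1000)
#     >>> np.allclose(x, x_rec)
#     True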
def stftfreq(wsize, sfreq=None):
"""Frequencies of stft transformation
Parameters
----------
wsize : int
Size of stft window
sfreq : float
        Sampling frequency. If None, the frequencies are given between 0 and
        pi; otherwise they are given in Hz.
Returns
-------
freqs : array
The positive frequencies returned by stft
See Also
--------
stft
istft
"""
n_freq = wsize // 2 + 1
freqs = fftfreq(wsize)
freqs = np.abs(freqs[:n_freq])
if sfreq is not None:
freqs *= float(sfreq)
return freqs
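# Editor's note: illustrative values for a small window:
#
#     >>> stftfreq(8, sfreq=1000.)
#     array([   0.,  125.,  250.,  375.,  500.])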
def stft_norm2(X):
"""Compute L2 norm of STFT transform
It takes into account that stft only return positive frequencies.
As we use tight frame this quantity is conserved by the stft.
Parameters
----------
X : 3D complex array
The STFT transforms
Returns
-------
norms2 : array
The squared L2 norm of every row of X.
"""
X2 = (X * X.conj()).real
# compute all L2 coefs and remove first and last frequency once.
norms2 = (2. * X2.sum(axis=2).sum(axis=1) - np.sum(X2[:, 0, :], axis=1) -
np.sum(X2[:, -1, :], axis=1))
return norms2
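# Editor's note: hedged Parseval-style check of the tight-frame property,
# reusing the illustrative x from the sketches above:
#
#     >>> np.allclose(stft_norm2(stft(x, 128)), (x ** 2).sum(axis=1))
#     True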
| bsd-3-clause | -8,098,505,674,117,436,000 | 227,061,152,419,016,320 | 26.413502 | 79 | 0.562567 | false |
acosinwork/Arduino | arduino-core/src/processing/app/i18n/python/update.py | 134 | 1464 | #!/usr/bin/env python
#vim:set fileencoding=utf-8 sw=2 expandtab
def unquote(s):
s = s.strip()
if s[0] != '"' or s[-1] != '"':
    raise RuntimeError('not a quoted string: %r' % s)
return s[1:-1]
def read_po(fp):
if isinstance(fp, str):
fp = open(fp)
d = {}
st = 1
comment = key = rkey = rvalue = ''
for line in fp:
if line[0] == '#' or line.strip() == '':
if st == 2:
d[key] = (comment, rkey, rvalue)
st = 1
comment = key = rkey = rvalue = ''
comment += line
elif line[0] == '"':
if st == 1:
key += unquote(line)
rkey += line
else:
rvalue += line
elif line.startswith('msgid '):
st = 1
key = unquote(line[6:])
rkey = line
elif line.startswith('msgstr '):
st = 2
rvalue = line
else:
      raise RuntimeError('unexpected line: %r' % line)
if st == 2:
d[key] = (comment, rkey, rvalue)
return d
def dump(d, dstFile):
out = open(dstFile, 'w')
# The first block in file should go first because the key is ''.
for key in sorted(d.keys()):
(comment, rkey, rvalue) = d[key]
out.write(comment)
out.write(rkey)
out.write(rvalue)
out.close()
def merge(d, dd):
for key in dd.keys():
    if key in d:
d[key] = dd[key]
return d
# Remove currently unused catalog text lines from ".po" file.
def main():
import sys
d = read_po(sys.stdin)
dd = read_po(sys.argv[1])
dump(merge(d, dd), sys.argv[1])
if __name__ == '__main__':
main()
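# Editor's note: hedged usage sketch; the file names below are illustrative.
# New catalog text is read from stdin, existing translations are read from the
# ".po" file named on the command line, and the merged result (only the keys
# present on stdin, with any existing translations carried over) is written
# back to that file, sorted by msgid:
#
#   python update.py Resources_de.po < new_catalog.po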
| lgpl-2.1 | -8,532,543,288,234,087,000 | 4,055,611,668,989,387,000 | 19.619718 | 66 | 0.537568 | false |
mrworf/multiremote | drivers/base.py | 1 | 9963 | # This file is part of multiRemote.
#
# multiRemote is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# multiRemote is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with multiRemote. If not, see <http://www.gnu.org/licenses/>.
#
"""
Simplest driver of all: provides the logic for power handling and command
dispatch, and simplifies some of the nitty-gritty work that every driver
must do.
It is highly recommended that drivers use this class as their base class,
since it provides a fair amount of abstraction and easier power management.
"""
from modules.commandtype import CommandType
import traceback
import logging
import socket
import requests
from xml.etree import ElementTree
from dataclasses import field,make_dataclass
class driverBase:
def __init__(self, *args):
self.power = False
self.COMMAND_HANDLER = {}
self.httpTimeout = 250 # 250ms
self.handlers = []
self.eventManager = None
# Invoke the real init function
self.init(*args)
def setEventManager(self, eventManager):
self.eventManager = eventManager
def init(self):
""" Override to do additional initialization
"""
pass
def eventOn(self):
""" Override to handle power on event
"""
logging.warning("" + repr(self) + " is not implementing power on")
def eventOff(self):
""" Override to handle power off event
"""
logging.warning("" + repr(self) + " is not implementing power off")
def eventExtras(self, keyvalue):
""" Override this to handle extra data
"""
pass
def isAsync(self):
''' Override this to change async behavior, default is True
Async means that multiple instances of this driver can be
used in parallel.
'''
return True
############################################################################
## Functions below provide convenience for the new driver.
############################################################################
def _handleResponse(self, r, contentIsJSON=False, contentIsXML=False):
result = {
'success' : False,
'code': 501,
'content' : None
}
try:
if contentIsJSON:
        content = r.json()
elif contentIsXML:
content = ElementTree.fromstring(r.content)
else:
content = r.content
result = {
'success' : r.status_code == requests.codes.ok,
'code': r.status_code,
'content' : content
}
except:
logging.exception('Failed to parse result')
return result
def httpGet(self, url, contentIsJSON=False, contentIsXML=False):
result = {
'success' : False,
'code': 500,
'content' : None
}
try:
r = requests.get(url, timeout=self.httpTimeout/1000.0)
result = self._handleResponse(r, contentIsXML=contentIsXML, contentIsJSON=contentIsJSON)
except:
logging.exception('HTTP GET failed')
return result
def httpPost(self, url, data = None, contentIsJSON=False, contentIsXML=False):
result = {
'success' : False,
'code': 500,
'content' : None
}
try:
r = requests.post(url, data=data, timeout=self.httpTimeout/1000.0)
      result = self._handleResponse(r, contentIsXML=contentIsXML, contentIsJSON=contentIsJSON)
except:
logging.exception('HTTP POST failed')
return result
def FQDN2IP(self, fqdn, getIPV6 = False):
""" Takes a regular DNS name and resolves it into an IP address instead.
If you provide an IP address, it will simply return the IP address.
"""
try:
family = socket.AF_INET
if getIPV6:
family = socket.AF_INET6
details = socket.getaddrinfo(fqdn, 80, family, socket.SOCK_STREAM)
if details is None or len(details) < 1:
logging.error('Unable to resolve "%s" to a network address', fqdn)
elif len(details) > 1:
logging.warning('"%s" returned %d results, only using the first entry', fqdn, len(details))
return details[0][4][0]
except:
logging.exception('Unable to resolve "%s"', fqdn)
return None
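  # Editor's note: hedged example (the hostname is illustrative):
  #
  #   self.FQDN2IP('receiver.local')    # -> e.g. '192.168.1.20'
  #   self.FQDN2IP('192.168.1.20')      # IP addresses pass straight through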
def registerHandler(self, handler, cmds):
""" API: Registers a handler to be called for cmds defined in list
Does not have unregister since this should not change during its lifetime
"""
self.handlers.append({'handler':handler, 'commands': cmds})
def addCommand(self, command, cmdtype, handler, name = None, desc = None, extras = None, args = 0):
""" Convenience function, allows adding commands to the list which
is exposed by getCommands() and handleCommand()
"""
    if name is None:
      name = command
    if desc is None:
      desc = name
    if extras is None:
self.COMMAND_HANDLER[command] = {
"arguments" : args,
"handler" : handler,
"name" : name,
"description" : desc,
"type" : cmdtype
}
else:
self.COMMAND_HANDLER[command] = {
"arguments" : args,
"handler" : handler,
"name" : name,
"description" : desc,
"type" : cmdtype,
"extras" : extras
}
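  # Editor's note: hedged sketch of a concrete driver using the helpers above;
  # the command-type constant, URL and handler name are illustrative
  # assumptions, not taken from this project:
  #
  #   class driverExample(driverBase):
  #     def init(self):
  #       self.addCommand("play", CommandType.PLAYBACK, self.__play)
  #     def __play(self, zone):
  #       return self.httpGet("http://player.local/api/play")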
############################################################################
## Functions below are usually not overriden since they provide basic
## housekeeping. It's better to override eventXXX() functions above.
############################################################################
def setPower(self, enable):
""" API: Changes the power state of the device, if the state already
is at the requested value, then nothing happens.
"""
if self.power == enable:
return True
self.power = enable
try:
if enable:
self.eventOn()
else:
self.eventOff()
except:
logging.exception("Exception when calling setPower(%s)" % repr(enable))
return True
def applyExtras(self, keyvaluepairs):
""" API: Called when this device is selected as a scene, can be called more
than once during a powered session, since user may switch between
different scenes which all use the same driver but different extras.
By default, this parses a string that looks like this:
key=value,key=value,...
And calls eventExtras() with a dict, but drivers can override this
directly if needed. Otherwise, eventExtras is the recommended override
method.
"""
result = {}
pairs = keyvaluepairs.split(",")
for pair in pairs:
parts = pair.split("=", 1)
if len(parts) == 2:
result[parts[0].strip()] = parts[1].strip()
if len(result) > 0:
self.eventExtras(result)
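  # Editor's note: illustrative values -- applyExtras("input=hdmi1, mode=movie")
  # ends up calling eventExtras({'input': 'hdmi1', 'mode': 'movie'}).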
def handleCommand(self, zone, command, argument):
""" API: Called by the server whenever a command needs to be executed,
the only exception is power commands, they are ALWAYS called
through the setPower() function.
-- FUTURE: --
Eventually it will do low-level handling of state, what that
means is that certain command types will be grouped and multiple
calls to the same command will only execute the first one.
For example, calling input-hdmi1 three times will only execute
the first time. This is to avoid unnecessary latencies.
A driver will be able to override this behavior by adding a flag
to the command definition.
"""
'''
result = None
for handler in self.handlers:
if command in handler['commands']:
try:
result = handler['handler'](zone, command, argument)
except:
logging.exception("Exception executing command %s for zone %s" % (repr(command), repr(zone)))
break
return result
'''
result = None
if command not in self.COMMAND_HANDLER:
logging.error("%s is not a supported command" % command)
return result
try:
item = self.COMMAND_HANDLER[command]
if item["arguments"] == 0:
if "extras" in item:
result = item["handler"](zone, item["extras"])
else:
result = item["handler"](zone)
elif item["arguments"] == 1:
if "extras" in item:
result = item["handler"](zone, argument[0], item["extras"])
else:
result = item["handler"](zone, argument[0])
return result
except:
logging.exception("Exception executing command %s for zone %s" % (repr(command), repr(zone)))
return None
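  # Editor's note: illustrative dispatch -- handleCommand('zone1', 'volume-set',
  # ['25']) looks up the 'volume-set' entry registered via addCommand() and
  # calls its handler with the zone and the single argument.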
def getCommands(self):
""" API: Returns the list of supported commands. For now it also limits this
list depending on the type. This is less than ideal, but for now
this is how it's done.
"""
ret = {}
'''
for handler in self.handlers:
for cmd in handler['commands']:
ret
'''
for c in self.COMMAND_HANDLER:
# Do not expose certain commands
if self.COMMAND_HANDLER[c]["type"] > CommandType.LIMIT_GETCOMMANDS:
continue
ret[c] = {"name": "", "description": ""}
if "name" in self.COMMAND_HANDLER[c]:
ret[c]["name"] = self.COMMAND_HANDLER[c]["name"]
if "description" in self.COMMAND_HANDLER[c]:
ret[c]["description"] = self.COMMAND_HANDLER[c]["description"]
ret[c]["type"] = self.COMMAND_HANDLER[c]["type"]
return ret
def sendEvent(self, eventType, eventSource, eventData, zone=None):
# self.events.notify(None, {"type":"zone", "source" : remote, "data": {"zone" : zone, "inuse" : True}})
self.eventManager.notify(zone, {"type":eventType, "source":eventSource, "data":eventData})
| gpl-2.0 | 7,568,016,081,454,353,000 | -8,050,472,308,428,375,000 | 33.003413 | 115 | 0.618488 | false |
reinis-martinsons/gyb-frp | pyasn1/type/univ.py | 86 | 44623 | # ASN.1 "universal" data types
import operator, sys, math
from pyasn1.type import base, tag, constraint, namedtype, namedval, tagmap
from pyasn1.codec.ber import eoo
from pyasn1.compat import octets
from pyasn1 import error
# "Simple" ASN.1 types (yet incomplete)
class Integer(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02)
)
namedValues = namedval.NamedValues()
def __init__(self, value=None, tagSet=None, subtypeSpec=None,
namedValues=None):
if namedValues is None:
self.__namedValues = self.namedValues
else:
self.__namedValues = namedValues
base.AbstractSimpleAsn1Item.__init__(
self, value, tagSet, subtypeSpec
)
def __repr__(self):
if self.__namedValues is not self.namedValues:
return '%s, %r)' % (base.AbstractSimpleAsn1Item.__repr__(self)[:-1], self.__namedValues)
else:
return base.AbstractSimpleAsn1Item.__repr__(self)
def __and__(self, value): return self.clone(self._value & value)
def __rand__(self, value): return self.clone(value & self._value)
def __or__(self, value): return self.clone(self._value | value)
def __ror__(self, value): return self.clone(value | self._value)
def __xor__(self, value): return self.clone(self._value ^ value)
def __rxor__(self, value): return self.clone(value ^ self._value)
def __lshift__(self, value): return self.clone(self._value << value)
def __rshift__(self, value): return self.clone(self._value >> value)
def __add__(self, value): return self.clone(self._value + value)
def __radd__(self, value): return self.clone(value + self._value)
def __sub__(self, value): return self.clone(self._value - value)
def __rsub__(self, value): return self.clone(value - self._value)
def __mul__(self, value): return self.clone(self._value * value)
def __rmul__(self, value): return self.clone(value * self._value)
def __mod__(self, value): return self.clone(self._value % value)
def __rmod__(self, value): return self.clone(value % self._value)
def __pow__(self, value, modulo=None): return self.clone(pow(self._value, value, modulo))
def __rpow__(self, value): return self.clone(pow(value, self._value))
if sys.version_info[0] <= 2:
def __div__(self, value): return self.clone(self._value // value)
def __rdiv__(self, value): return self.clone(value // self._value)
else:
def __truediv__(self, value): return self.clone(self._value / value)
def __rtruediv__(self, value): return self.clone(value / self._value)
def __divmod__(self, value): return self.clone(self._value // value)
def __rdivmod__(self, value): return self.clone(value // self._value)
__hash__ = base.AbstractSimpleAsn1Item.__hash__
def __int__(self): return int(self._value)
if sys.version_info[0] <= 2:
def __long__(self): return long(self._value)
def __float__(self): return float(self._value)
def __abs__(self): return self.clone(abs(self._value))
def __index__(self): return int(self._value)
def __pos__(self): return self.clone(+self._value)
def __neg__(self): return self.clone(-self._value)
def __invert__(self): return self.clone(~self._value)
def __round__(self, n=0):
r = round(self._value, n)
if n:
return self.clone(r)
else:
return r
def __floor__(self): return math.floor(self._value)
def __ceil__(self): return math.ceil(self._value)
if sys.version_info[0:2] > (2, 5):
def __trunc__(self): return self.clone(math.trunc(self._value))
def __lt__(self, value): return self._value < value
def __le__(self, value): return self._value <= value
def __eq__(self, value): return self._value == value
def __ne__(self, value): return self._value != value
def __gt__(self, value): return self._value > value
def __ge__(self, value): return self._value >= value
def prettyIn(self, value):
if not isinstance(value, str):
try:
return int(value)
except:
raise error.PyAsn1Error(
'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1])
)
r = self.__namedValues.getValue(value)
if r is not None:
return r
try:
return int(value)
except:
raise error.PyAsn1Error(
'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1])
)
def prettyOut(self, value):
r = self.__namedValues.getName(value)
return r is None and str(value) or repr(r)
def getNamedValues(self): return self.__namedValues
def clone(self, value=None, tagSet=None, subtypeSpec=None,
namedValues=None):
if value is None and tagSet is None and subtypeSpec is None \
and namedValues is None:
return self
if value is None:
value = self._value
if tagSet is None:
tagSet = self._tagSet
if subtypeSpec is None:
subtypeSpec = self._subtypeSpec
if namedValues is None:
namedValues = self.__namedValues
return self.__class__(value, tagSet, subtypeSpec, namedValues)
def subtype(self, value=None, implicitTag=None, explicitTag=None,
subtypeSpec=None, namedValues=None):
if value is None:
value = self._value
if implicitTag is not None:
tagSet = self._tagSet.tagImplicitly(implicitTag)
elif explicitTag is not None:
tagSet = self._tagSet.tagExplicitly(explicitTag)
else:
tagSet = self._tagSet
if subtypeSpec is None:
subtypeSpec = self._subtypeSpec
else:
subtypeSpec = subtypeSpec + self._subtypeSpec
if namedValues is None:
namedValues = self.__namedValues
else:
namedValues = namedValues + self.__namedValues
return self.__class__(value, tagSet, subtypeSpec, namedValues)
class Boolean(Integer):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x01),
)
subtypeSpec = Integer.subtypeSpec+constraint.SingleValueConstraint(0,1)
namedValues = Integer.namedValues.clone(('False', 0), ('True', 1))
class BitString(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x03)
)
namedValues = namedval.NamedValues()
def __init__(self, value=None, tagSet=None, subtypeSpec=None,
namedValues=None):
if namedValues is None:
self.__namedValues = self.namedValues
else:
self.__namedValues = namedValues
base.AbstractSimpleAsn1Item.__init__(
self, value, tagSet, subtypeSpec
)
def clone(self, value=None, tagSet=None, subtypeSpec=None,
namedValues=None):
if value is None and tagSet is None and subtypeSpec is None \
and namedValues is None:
return self
if value is None:
value = self._value
if tagSet is None:
tagSet = self._tagSet
if subtypeSpec is None:
subtypeSpec = self._subtypeSpec
if namedValues is None:
namedValues = self.__namedValues
return self.__class__(value, tagSet, subtypeSpec, namedValues)
def subtype(self, value=None, implicitTag=None, explicitTag=None,
subtypeSpec=None, namedValues=None):
if value is None:
value = self._value
if implicitTag is not None:
tagSet = self._tagSet.tagImplicitly(implicitTag)
elif explicitTag is not None:
tagSet = self._tagSet.tagExplicitly(explicitTag)
else:
tagSet = self._tagSet
if subtypeSpec is None:
subtypeSpec = self._subtypeSpec
else:
subtypeSpec = subtypeSpec + self._subtypeSpec
if namedValues is None:
namedValues = self.__namedValues
else:
namedValues = namedValues + self.__namedValues
return self.__class__(value, tagSet, subtypeSpec, namedValues)
def __str__(self): return str(tuple(self))
# Immutable sequence object protocol
def __len__(self):
if self._len is None:
self._len = len(self._value)
return self._len
def __getitem__(self, i):
if isinstance(i, slice):
return self.clone(operator.getitem(self._value, i))
else:
return self._value[i]
def __add__(self, value): return self.clone(self._value + value)
def __radd__(self, value): return self.clone(value + self._value)
def __mul__(self, value): return self.clone(self._value * value)
def __rmul__(self, value): return self * value
def prettyIn(self, value):
r = []
if not value:
return ()
elif isinstance(value, str):
if value[0] == '\'':
if value[-2:] == '\'B':
for v in value[1:-2]:
if v == '0':
r.append(0)
elif v == '1':
r.append(1)
else:
raise error.PyAsn1Error(
'Non-binary BIT STRING initializer %s' % (v,)
)
return tuple(r)
elif value[-2:] == '\'H':
for v in value[1:-2]:
i = 4
v = int(v, 16)
while i:
i = i - 1
r.append((v>>i)&0x01)
return tuple(r)
else:
raise error.PyAsn1Error(
'Bad BIT STRING value notation %s' % (value,)
)
else:
for i in value.split(','):
j = self.__namedValues.getValue(i)
if j is None:
raise error.PyAsn1Error(
'Unknown bit identifier \'%s\'' % (i,)
)
if j >= len(r):
r.extend([0]*(j-len(r)+1))
r[j] = 1
return tuple(r)
elif isinstance(value, (tuple, list)):
r = tuple(value)
for b in r:
if b and b != 1:
raise error.PyAsn1Error(
'Non-binary BitString initializer \'%s\'' % (r,)
)
return r
elif isinstance(value, BitString):
return tuple(value)
else:
raise error.PyAsn1Error(
'Bad BitString initializer type \'%s\'' % (value,)
)
def prettyOut(self, value):
return '\"\'%s\'B\"' % ''.join([str(x) for x in value])
try:
all
except NameError: # Python 2.4
def all(iterable):
for element in iterable:
if not element:
return False
return True
class OctetString(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04)
)
defaultBinValue = defaultHexValue = base.noValue
encoding = 'us-ascii'
def __init__(self, value=None, tagSet=None, subtypeSpec=None,
encoding=None, binValue=None, hexValue=None):
if encoding is None:
self._encoding = self.encoding
else:
self._encoding = encoding
if binValue is not None:
value = self.fromBinaryString(binValue)
if hexValue is not None:
value = self.fromHexString(hexValue)
if value is None or value is base.noValue:
value = self.defaultHexValue
if value is None or value is base.noValue:
value = self.defaultBinValue
self.__asNumbersCache = None
base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec)
def clone(self, value=None, tagSet=None, subtypeSpec=None,
encoding=None, binValue=None, hexValue=None):
if value is None and tagSet is None and subtypeSpec is None and \
encoding is None and binValue is None and hexValue is None:
return self
if value is None and binValue is None and hexValue is None:
value = self._value
if tagSet is None:
tagSet = self._tagSet
if subtypeSpec is None:
subtypeSpec = self._subtypeSpec
if encoding is None:
encoding = self._encoding
return self.__class__(
value, tagSet, subtypeSpec, encoding, binValue, hexValue
)
if sys.version_info[0] <= 2:
def prettyIn(self, value):
if isinstance(value, str):
return value
elif isinstance(value, unicode):
try:
return value.encode(self._encoding)
except (LookupError, UnicodeEncodeError):
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
)
elif isinstance(value, (tuple, list)):
try:
return ''.join([ chr(x) for x in value ])
except ValueError:
raise error.PyAsn1Error(
'Bad OctetString initializer \'%s\'' % (value,)
)
else:
return str(value)
else:
def prettyIn(self, value):
if isinstance(value, bytes):
return value
elif isinstance(value, str):
try:
return value.encode(self._encoding)
except UnicodeEncodeError:
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
)
elif isinstance(value, OctetString):
return value.asOctets()
elif isinstance(value, (tuple, list, map)):
try:
return bytes(value)
except ValueError:
raise error.PyAsn1Error(
'Bad OctetString initializer \'%s\'' % (value,)
)
else:
try:
return str(value).encode(self._encoding)
except UnicodeEncodeError:
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
)
def fromBinaryString(self, value):
bitNo = 8; byte = 0; r = ()
for v in value:
if bitNo:
bitNo = bitNo - 1
else:
bitNo = 7
r = r + (byte,)
byte = 0
if v == '0':
v = 0
elif v == '1':
v = 1
else:
raise error.PyAsn1Error(
'Non-binary OCTET STRING initializer %s' % (v,)
)
byte = byte | (v << bitNo)
return octets.ints2octs(r + (byte,))
def fromHexString(self, value):
r = p = ()
for v in value:
if p:
r = r + (int(p+v, 16),)
p = ()
else:
p = v
if p:
r = r + (int(p+'0', 16),)
return octets.ints2octs(r)
def prettyOut(self, value):
if sys.version_info[0] <= 2:
numbers = tuple(( ord(x) for x in value ))
else:
numbers = tuple(value)
if all(x >= 32 and x <= 126 for x in numbers):
return str(value)
else:
return '0x' + ''.join(( '%.2x' % x for x in numbers ))
def __repr__(self):
r = []
doHex = False
if self._value is not self.defaultValue:
for x in self.asNumbers():
if x < 32 or x > 126:
doHex = True
break
if not doHex:
r.append('%r' % (self._value,))
if self._tagSet is not self.tagSet:
r.append('tagSet=%r' % (self._tagSet,))
if self._subtypeSpec is not self.subtypeSpec:
r.append('subtypeSpec=%r' % (self._subtypeSpec,))
if self.encoding is not self._encoding:
r.append('encoding=%r' % (self._encoding,))
if doHex:
r.append('hexValue=%r' % ''.join([ '%.2x' % x for x in self.asNumbers() ]))
return '%s(%s)' % (self.__class__.__name__, ', '.join(r))
if sys.version_info[0] <= 2:
def __str__(self): return str(self._value)
def __unicode__(self):
return self._value.decode(self._encoding, 'ignore')
def asOctets(self): return self._value
def asNumbers(self):
if self.__asNumbersCache is None:
self.__asNumbersCache = tuple([ ord(x) for x in self._value ])
return self.__asNumbersCache
else:
def __str__(self): return self._value.decode(self._encoding, 'ignore')
def __bytes__(self): return self._value
def asOctets(self): return self._value
def asNumbers(self):
if self.__asNumbersCache is None:
self.__asNumbersCache = tuple(self._value)
return self.__asNumbersCache
# Immutable sequence object protocol
def __len__(self):
if self._len is None:
self._len = len(self._value)
return self._len
def __getitem__(self, i):
if isinstance(i, slice):
return self.clone(operator.getitem(self._value, i))
else:
return self._value[i]
def __add__(self, value): return self.clone(self._value + self.prettyIn(value))
def __radd__(self, value): return self.clone(self.prettyIn(value) + self._value)
def __mul__(self, value): return self.clone(self._value * value)
def __rmul__(self, value): return self * value
def __int__(self): return int(self._value)
def __float__(self): return float(self._value)
class Null(OctetString):
defaultValue = ''.encode() # This is tightly constrained
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x05)
)
subtypeSpec = OctetString.subtypeSpec+constraint.SingleValueConstraint(''.encode())
if sys.version_info[0] <= 2:
intTypes = (int, long)
else:
intTypes = (int,)
numericTypes = intTypes + (float,)
class ObjectIdentifier(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x06)
)
def __add__(self, other): return self.clone(self._value + other)
def __radd__(self, other): return self.clone(other + self._value)
def asTuple(self): return self._value
# Sequence object protocol
def __len__(self):
if self._len is None:
self._len = len(self._value)
return self._len
def __getitem__(self, i):
if isinstance(i, slice):
return self.clone(
operator.getitem(self._value, i)
)
else:
return self._value[i]
def __str__(self): return self.prettyPrint()
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self.prettyPrint())
def index(self, suboid): return self._value.index(suboid)
def isPrefixOf(self, value):
"""Returns true if argument OID resides deeper in the OID tree"""
l = len(self)
if l <= len(value):
if self._value[:l] == value[:l]:
return 1
return 0
def prettyIn(self, value):
"""Dotted -> tuple of numerics OID converter"""
if isinstance(value, tuple):
pass
elif isinstance(value, ObjectIdentifier):
return tuple(value)
elif octets.isStringType(value):
r = []
for element in [ x for x in value.split('.') if x != '' ]:
try:
r.append(int(element, 0))
except ValueError:
raise error.PyAsn1Error(
'Malformed Object ID %s at %s: %s' %
(str(value), self.__class__.__name__, sys.exc_info()[1])
)
value = tuple(r)
else:
try:
value = tuple(value)
except TypeError:
raise error.PyAsn1Error(
'Malformed Object ID %s at %s: %s' %
(str(value), self.__class__.__name__,sys.exc_info()[1])
)
for x in value:
if not isinstance(x, intTypes) or x < 0:
raise error.PyAsn1Error(
'Invalid sub-ID in %s at %s' % (value, self.__class__.__name__)
)
return value
def prettyOut(self, value): return '.'.join([ str(x) for x in value ])
class Real(base.AbstractSimpleAsn1Item):
binEncBase = None # binEncBase = 16 is recommended for large numbers
try:
_plusInf = float('inf')
_minusInf = float('-inf')
_inf = (_plusInf, _minusInf)
except ValueError:
# Infinity support is platform and Python dependent
_plusInf = _minusInf = None
_inf = ()
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x09)
)
def __normalizeBase10(self, value):
m, b, e = value
while m and m % 10 == 0:
m = m / 10
e = e + 1
return m, b, e
def prettyIn(self, value):
if isinstance(value, tuple) and len(value) == 3:
if not isinstance(value[0], numericTypes) or \
not isinstance(value[1], intTypes) or \
not isinstance(value[2], intTypes):
raise error.PyAsn1Error('Lame Real value syntax: %s' % (value,))
if isinstance(value[0], float) and \
self._inf and value[0] in self._inf:
return value[0]
if value[1] not in (2, 10):
raise error.PyAsn1Error(
'Prohibited base for Real value: %s' % (value[1],)
)
if value[1] == 10:
value = self.__normalizeBase10(value)
return value
elif isinstance(value, intTypes):
return self.__normalizeBase10((value, 10, 0))
elif isinstance(value, (str, float)):
if isinstance(value, str):
try:
value = float(value)
except ValueError:
raise error.PyAsn1Error(
'Bad real value syntax: %s' % (value,)
)
if self._inf and value in self._inf:
return value
else:
e = 0
while int(value) != value:
value = value * 10
e = e - 1
return self.__normalizeBase10((int(value), 10, e))
elif isinstance(value, Real):
return tuple(value)
raise error.PyAsn1Error(
'Bad real value syntax: %s' % (value,)
)
def prettyOut(self, value):
if value in self._inf:
return '\'%s\'' % value
else:
return str(value)
def prettyPrint(self, scope=0):
if self.isInfinity():
return self.prettyOut(self._value)
else:
return str(float(self))
def isPlusInfinity(self): return self._value == self._plusInf
def isMinusInfinity(self): return self._value == self._minusInf
def isInfinity(self): return self._value in self._inf
def __str__(self): return str(float(self))
def __add__(self, value): return self.clone(float(self) + value)
def __radd__(self, value): return self + value
def __mul__(self, value): return self.clone(float(self) * value)
def __rmul__(self, value): return self * value
def __sub__(self, value): return self.clone(float(self) - value)
def __rsub__(self, value): return self.clone(value - float(self))
def __mod__(self, value): return self.clone(float(self) % value)
def __rmod__(self, value): return self.clone(value % float(self))
def __pow__(self, value, modulo=None): return self.clone(pow(float(self), value, modulo))
def __rpow__(self, value): return self.clone(pow(value, float(self)))
if sys.version_info[0] <= 2:
def __div__(self, value): return self.clone(float(self) / value)
def __rdiv__(self, value): return self.clone(value / float(self))
else:
def __truediv__(self, value): return self.clone(float(self) / value)
def __rtruediv__(self, value): return self.clone(value / float(self))
def __divmod__(self, value): return self.clone(float(self) // value)
def __rdivmod__(self, value): return self.clone(value // float(self))
def __int__(self): return int(float(self))
if sys.version_info[0] <= 2:
def __long__(self): return long(float(self))
def __float__(self):
if self._value in self._inf:
return self._value
else:
return float(
self._value[0] * pow(self._value[1], self._value[2])
)
def __abs__(self): return self.clone(abs(float(self)))
def __pos__(self): return self.clone(+float(self))
def __neg__(self): return self.clone(-float(self))
def __round__(self, n=0):
r = round(float(self), n)
if n:
return self.clone(r)
else:
return r
def __floor__(self): return self.clone(math.floor(float(self)))
def __ceil__(self): return self.clone(math.ceil(float(self)))
if sys.version_info[0:2] > (2, 5):
def __trunc__(self): return self.clone(math.trunc(float(self)))
def __lt__(self, value): return float(self) < value
def __le__(self, value): return float(self) <= value
def __eq__(self, value): return float(self) == value
def __ne__(self, value): return float(self) != value
def __gt__(self, value): return float(self) > value
def __ge__(self, value): return float(self) >= value
if sys.version_info[0] <= 2:
def __nonzero__(self): return bool(float(self))
else:
def __bool__(self): return bool(float(self))
__hash__ = base.AbstractSimpleAsn1Item.__hash__
def __getitem__(self, idx):
if self._value in self._inf:
raise error.PyAsn1Error('Invalid infinite value operation')
else:
return self._value[idx]
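# Editor's note: hedged example of the (mantissa, base, exponent) encoding
# accepted by Real.prettyIn() above:
#
#     >>> float(Real((123, 10, 2)))      # 123 * 10**2
#     12300.0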
class Enumerated(Integer):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x0A)
)
# "Structured" ASN.1 types
class SetOf(base.AbstractConstructedAsn1Item):
componentType = None
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
)
typeId = 1
strictConstraints = False
def _cloneComponentValues(self, myClone, cloneValueFlag):
idx = 0; l = len(self._componentValues)
while idx < l:
c = self._componentValues[idx]
if c is not None:
if isinstance(c, base.AbstractConstructedAsn1Item):
myClone.setComponentByPosition(
idx, c.clone(cloneValueFlag=cloneValueFlag)
)
else:
myClone.setComponentByPosition(idx, c.clone())
idx = idx + 1
def _verifyComponent(self, idx, value):
t = self._componentType
if t is None:
return
if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints):
raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t))
if self.strictConstraints and \
not t.isSuperTypeOf(value, matchTags=False):
raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t))
def getComponentByPosition(self, idx): return self._componentValues[idx]
def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
l = len(self._componentValues)
if idx >= l:
self._componentValues = self._componentValues + (idx-l+1)*[None]
if value is None:
if self._componentValues[idx] is None:
if self._componentType is None:
raise error.PyAsn1Error('Component type not defined')
self._componentValues[idx] = self._componentType.clone()
self._componentValuesSet = self._componentValuesSet + 1
return self
elif not isinstance(value, base.Asn1Item):
if self._componentType is None:
raise error.PyAsn1Error('Component type not defined')
if isinstance(self._componentType, base.AbstractSimpleAsn1Item):
value = self._componentType.clone(value=value)
else:
raise error.PyAsn1Error('Instance value required')
if verifyConstraints:
if self._componentType is not None:
self._verifyComponent(idx, value)
self._verifySubtypeSpec(value, idx)
if self._componentValues[idx] is None:
self._componentValuesSet = self._componentValuesSet + 1
self._componentValues[idx] = value
return self
def getComponentTagMap(self):
if self._componentType is not None:
return self._componentType.getTagMap()
def prettyPrint(self, scope=0):
scope = scope + 1
r = self.__class__.__name__ + ':\n'
for idx in range(len(self._componentValues)):
r = r + ' '*scope
if self._componentValues[idx] is None:
r = r + '<empty>'
else:
r = r + self._componentValues[idx].prettyPrint(scope)
return r
def prettyPrintType(self, scope=0):
scope = scope + 1
r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__)
if self._componentType is not None:
r = r + ' '*scope
r = r + self._componentType.prettyPrintType(scope)
return r + '\n' + ' '*(scope-1) + '}'
class SequenceOf(SetOf):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
)
typeId = 2
class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
componentType = namedtype.NamedTypes()
strictConstraints = False
def __init__(self, componentType=None, tagSet=None,
subtypeSpec=None, sizeSpec=None):
if componentType is None:
componentType = self.componentType
base.AbstractConstructedAsn1Item.__init__(
self, componentType.clone(), tagSet, subtypeSpec, sizeSpec
)
self._componentTypeLen = len(self._componentType)
def __getitem__(self, idx):
if isinstance(idx, str):
return self.getComponentByName(idx)
else:
return base.AbstractConstructedAsn1Item.__getitem__(self, idx)
def __setitem__(self, idx, value):
if isinstance(idx, str):
self.setComponentByName(idx, value)
else:
base.AbstractConstructedAsn1Item.__setitem__(self, idx, value)
def _cloneComponentValues(self, myClone, cloneValueFlag):
idx = 0; l = len(self._componentValues)
while idx < l:
c = self._componentValues[idx]
if c is not None:
if isinstance(c, base.AbstractConstructedAsn1Item):
myClone.setComponentByPosition(
idx, c.clone(cloneValueFlag=cloneValueFlag)
)
else:
myClone.setComponentByPosition(idx, c.clone())
idx = idx + 1
def _verifyComponent(self, idx, value):
if idx >= self._componentTypeLen:
raise error.PyAsn1Error(
'Component type error out of range'
)
t = self._componentType[idx].getType()
if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints):
raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t))
if self.strictConstraints and \
not t.isSuperTypeOf(value, matchTags=False):
raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t))
def getComponentByName(self, name):
return self.getComponentByPosition(
self._componentType.getPositionByName(name)
)
def setComponentByName(self, name, value=None, verifyConstraints=True):
return self.setComponentByPosition(
self._componentType.getPositionByName(name),value,verifyConstraints
)
def getComponentByPosition(self, idx):
try:
return self._componentValues[idx]
except IndexError:
if idx < self._componentTypeLen:
return
raise
def setComponentByPosition(self, idx, value=None,
verifyConstraints=True,
exactTypes=False,
matchTags=True,
matchConstraints=True):
l = len(self._componentValues)
if idx >= l:
self._componentValues = self._componentValues + (idx-l+1)*[None]
if value is None:
if self._componentValues[idx] is None:
self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone()
self._componentValuesSet = self._componentValuesSet + 1
return self
elif not isinstance(value, base.Asn1Item):
t = self._componentType.getTypeByPosition(idx)
if isinstance(t, base.AbstractSimpleAsn1Item):
value = t.clone(value=value)
else:
raise error.PyAsn1Error('Instance value required')
if verifyConstraints:
if self._componentTypeLen:
self._verifyComponent(idx, value)
self._verifySubtypeSpec(value, idx)
if self._componentValues[idx] is None:
self._componentValuesSet = self._componentValuesSet + 1
self._componentValues[idx] = value
return self
def getNameByPosition(self, idx):
if self._componentTypeLen:
return self._componentType.getNameByPosition(idx)
def getDefaultComponentByPosition(self, idx):
if self._componentTypeLen and self._componentType[idx].isDefaulted:
return self._componentType[idx].getType()
def getComponentType(self):
if self._componentTypeLen:
return self._componentType
def setDefaultComponents(self):
if self._componentTypeLen == self._componentValuesSet:
return
idx = self._componentTypeLen
while idx:
idx = idx - 1
if self._componentType[idx].isDefaulted:
if self.getComponentByPosition(idx) is None:
self.setComponentByPosition(idx)
elif not self._componentType[idx].isOptional:
if self.getComponentByPosition(idx) is None:
raise error.PyAsn1Error(
'Uninitialized component #%s at %r' % (idx, self)
)
def prettyPrint(self, scope=0):
scope = scope + 1
r = self.__class__.__name__ + ':\n'
for idx in range(len(self._componentValues)):
if self._componentValues[idx] is not None:
r = r + ' '*scope
componentType = self.getComponentType()
if componentType is None:
r = r + '<no-name>'
else:
r = r + componentType.getNameByPosition(idx)
r = '%s=%s\n' % (
r, self._componentValues[idx].prettyPrint(scope)
)
return r
def prettyPrintType(self, scope=0):
scope = scope + 1
r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__)
for idx in range(len(self.componentType)):
r = r + ' '*scope
r = r + '"%s"' % self.componentType.getNameByPosition(idx)
r = '%s = %s\n' % (
r, self._componentType.getTypeByPosition(idx).prettyPrintType(scope)
)
return r + '\n' + ' '*(scope-1) + '}'
class Sequence(SequenceAndSetBase):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
)
typeId = 3
def getComponentTagMapNearPosition(self, idx):
if self._componentType:
return self._componentType.getTagMapNearPosition(idx)
def getComponentPositionNearType(self, tagSet, idx):
if self._componentType:
return self._componentType.getPositionNearType(tagSet, idx)
else:
return idx
class Set(SequenceAndSetBase):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
)
typeId = 4
def getComponent(self, innerFlag=0): return self
def getComponentByType(self, tagSet, innerFlag=0):
c = self.getComponentByPosition(
self._componentType.getPositionByType(tagSet)
)
if innerFlag and isinstance(c, Set):
# get inner component by inner tagSet
return c.getComponent(1)
else:
# get outer component by inner tagSet
return c
def setComponentByType(self, tagSet, value=None, innerFlag=0,
verifyConstraints=True):
idx = self._componentType.getPositionByType(tagSet)
t = self._componentType.getTypeByPosition(idx)
if innerFlag: # set inner component by inner tagSet
if t.getTagSet():
return self.setComponentByPosition(
idx, value, verifyConstraints
)
else:
t = self.setComponentByPosition(idx).getComponentByPosition(idx)
return t.setComponentByType(
tagSet, value, innerFlag, verifyConstraints
)
else: # set outer component by inner tagSet
return self.setComponentByPosition(
idx, value, verifyConstraints
)
def getComponentTagMap(self):
if self._componentType:
return self._componentType.getTagMap(True)
def getComponentPositionByType(self, tagSet):
if self._componentType:
return self._componentType.getPositionByType(tagSet)
class Choice(Set):
tagSet = baseTagSet = tag.TagSet() # untagged
sizeSpec = constraint.ConstraintsIntersection(
constraint.ValueSizeConstraint(1, 1)
)
typeId = 5
_currentIdx = None
def __eq__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] == other
return NotImplemented
def __ne__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] != other
return NotImplemented
def __lt__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] < other
return NotImplemented
def __le__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] <= other
return NotImplemented
def __gt__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] > other
return NotImplemented
def __ge__(self, other):
if self._componentValues:
return self._componentValues[self._currentIdx] >= other
return NotImplemented
if sys.version_info[0] <= 2:
def __nonzero__(self): return bool(self._componentValues)
else:
def __bool__(self): return bool(self._componentValues)
def __len__(self): return self._currentIdx is not None and 1 or 0
def verifySizeSpec(self):
if self._currentIdx is None:
raise error.PyAsn1Error('Component not chosen')
else:
self._sizeSpec(' ')
def _cloneComponentValues(self, myClone, cloneValueFlag):
try:
c = self.getComponent()
except error.PyAsn1Error:
pass
else:
if isinstance(c, Choice):
tagSet = c.getEffectiveTagSet()
else:
tagSet = c.getTagSet()
if isinstance(c, base.AbstractConstructedAsn1Item):
myClone.setComponentByType(
tagSet, c.clone(cloneValueFlag=cloneValueFlag)
)
else:
myClone.setComponentByType(tagSet, c.clone())
def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
l = len(self._componentValues)
if idx >= l:
self._componentValues = self._componentValues + (idx-l+1)*[None]
if self._currentIdx is not None:
self._componentValues[self._currentIdx] = None
if value is None:
if self._componentValues[idx] is None:
self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone()
self._componentValuesSet = 1
self._currentIdx = idx
return self
elif not isinstance(value, base.Asn1Item):
value = self._componentType.getTypeByPosition(idx).clone(
value=value
)
if verifyConstraints:
if self._componentTypeLen:
self._verifyComponent(idx, value)
self._verifySubtypeSpec(value, idx)
self._componentValues[idx] = value
self._currentIdx = idx
self._componentValuesSet = 1
return self
def getMinTagSet(self):
if self._tagSet:
return self._tagSet
else:
return self._componentType.genMinTagSet()
def getEffectiveTagSet(self):
if self._tagSet:
return self._tagSet
else:
c = self.getComponent()
if isinstance(c, Choice):
return c.getEffectiveTagSet()
else:
return c.getTagSet()
def getTagMap(self):
if self._tagSet:
return Set.getTagMap(self)
else:
return Set.getComponentTagMap(self)
def getComponent(self, innerFlag=0):
if self._currentIdx is None:
raise error.PyAsn1Error('Component not chosen')
else:
c = self._componentValues[self._currentIdx]
if innerFlag and isinstance(c, Choice):
return c.getComponent(innerFlag)
else:
return c
def getName(self, innerFlag=0):
if self._currentIdx is None:
raise error.PyAsn1Error('Component not chosen')
else:
if innerFlag:
c = self._componentValues[self._currentIdx]
if isinstance(c, Choice):
return c.getName(innerFlag)
return self._componentType.getNameByPosition(self._currentIdx)
def setDefaultComponents(self): pass
class Any(OctetString):
tagSet = baseTagSet = tag.TagSet() # untagged
typeId = 6
def getTagMap(self):
return tagmap.TagMap(
{ self.getTagSet(): self },
{ eoo.endOfOctets.getTagSet(): eoo.endOfOctets },
self
)
# XXX
# coercion rules?
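# Editor's note: hedged end-to-end sketch of declaring and filling a SEQUENCE
# with the types defined in this module; the record layout is illustrative:
#
#     from pyasn1.type import namedtype
#
#     class Record(Sequence):
#         componentType = namedtype.NamedTypes(
#             namedtype.NamedType('id', Integer()),
#             namedtype.NamedType('data', OctetString())
#         )
#
#     r = Record()
#     r.setComponentByName('id', 7)
#     r.setComponentByName('data', 'payload')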
| apache-2.0 | 2,129,363,896,727,359,500 | -3,640,191,986,370,396,000 | 37.601211 | 105 | 0.548999 | false |
datenbetrieb/odoo | openerp/addons/test_inherit/models.py | 91 | 3456 | # -*- coding: utf-8 -*-
from openerp import models, fields, api, osv
# We just create a new model
class mother(models.Model):
_name = 'test.inherit.mother'
_columns = {
# check interoperability of field inheritance with old-style fields
'name': osv.fields.char('Name'),
'state': osv.fields.selection([('a', 'A'), ('b', 'B')], string='State'),
}
_defaults = {
'name': 'Foo',
}
surname = fields.Char(compute='_compute_surname')
@api.one
@api.depends('name')
def _compute_surname(self):
self.surname = self.name or ''
# We want to inherits from the parent model and we add some fields
# in the child object
class daughter(models.Model):
_name = 'test.inherit.daughter'
template_id = fields.Many2one('test.inherit.mother', 'Template',
delegate=True, required=True, ondelete='cascade')
field_in_daughter = fields.Char('Field1')
# We add a new field in the parent object. Because of a recent refactoring,
# this feature was broken.
# This test and these models try to show the bug and fix it.
class mother(models.Model):
_inherit = 'test.inherit.mother'
field_in_mother = fields.Char()
partner_id = fields.Many2one('res.partner')
# extend the name field: make it required and change its default value
name = fields.Char(required=True, default='Bar')
# extend the selection of the state field
state = fields.Selection(selection_add=[('c', 'C')])
# override the computed field, and extend its dependencies
@api.one
@api.depends('field_in_mother')
def _compute_surname(self):
if self.field_in_mother:
self.surname = self.field_in_mother
else:
super(mother, self)._compute_surname()
class mother(models.Model):
_inherit = 'test.inherit.mother'
# extend again the selection of the state field
state = fields.Selection(selection_add=[('d', 'D')])
class daughter(models.Model):
_inherit = 'test.inherit.daughter'
# simply redeclare the field without adding any option
template_id = fields.Many2one()
# change the default value of an inherited field
name = fields.Char(default='Baz')
class res_partner(models.Model):
_inherit = 'res.partner'
# define a one2many field based on the inherited field partner_id
daughter_ids = fields.One2many('test.inherit.daughter', 'partner_id')
# Check the overriding of property fields by non-property fields.
# Contribution by Adrien Peiffer (ACSONE).
class test_inherit_property(models.Model):
_name = 'test.inherit.property'
_columns = {
'name': osv.fields.char('Name', required=True),
'property_foo': osv.fields.property(string='Foo', type='integer'),
'property_bar': osv.fields.property(string='Bar', type='integer'),
}
class test_inherit_property(models.Model):
_inherit = 'test.inherit.property'
@api.multi
def _get_foo(self, name, arg):
return dict.fromkeys(self.ids, 42)
_columns = {
# override property_foo with an old-api function field
'property_foo': osv.fields.function(_get_foo, type='integer', string='Foo'),
}
# override property_bar with a new-api computed field
property_bar = fields.Integer(compute='_compute_bar', company_dependent=False)
@api.multi
def _compute_bar(self):
for record in self:
record.property_bar = 42
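# Illustrative expectations once all the extensions above are loaded
# (hypothetical shell session; the values follow from the redefinitions):
#
#   m = env['test.inherit.mother'].create({})
#   m.name                        # 'Bar'  (default overridden in the extension)
#   m._fields['state'].selection  # [('a','A'), ('b','B'), ('c','C'), ('d','D')]
#   d = env['test.inherit.daughter'].create({})
#   d.name                        # 'Baz'  (default overridden again in daughter)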
| agpl-3.0 | 8,244,610,308,027,191,000 | -1,628,276,479,203,286,500 | 29.857143 | 84 | 0.653067 | false |
2014cdag4/2014cdag4 | wsgi/static/Brython2.1.0-20140419-113919/Lib/multiprocessing/dummy/connection.py | 707 | 3049 | #
# Analogue of `multiprocessing.connection` which uses queues instead of sockets
#
# multiprocessing/dummy/connection.py
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of author nor the names of any contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__all__ = [ 'Client', 'Listener', 'Pipe' ]
from queue import Queue
families = [None]
class Listener(object):
def __init__(self, address=None, family=None, backlog=1):
self._backlog_queue = Queue(backlog)
def accept(self):
return Connection(*self._backlog_queue.get())
def close(self):
self._backlog_queue = None
address = property(lambda self: self._backlog_queue)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.close()
def Client(address):
_in, _out = Queue(), Queue()
address.put((_out, _in))
return Connection(_in, _out)
def Pipe(duplex=True):
a, b = Queue(), Queue()
return Connection(a, b), Connection(b, a)
class Connection(object):
def __init__(self, _in, _out):
self._out = _out
self._in = _in
self.send = self.send_bytes = _out.put
self.recv = self.recv_bytes = _in.get
def poll(self, timeout=0.0):
if self._in.qsize() > 0:
return True
if timeout <= 0.0:
return False
self._in.not_empty.acquire()
self._in.not_empty.wait(timeout)
self._in.not_empty.release()
return self._in.qsize() > 0
def close(self):
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.close()
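if __name__ == '__main__':
    # Minimal smoke test of the queue-backed analogues above (illustrative
    # only; not part of the original module).
    a, b = Pipe()
    a.send('ping')
    assert b.poll(1.0)
    print(b.recv())                    # -> ping

    listener = Listener()
    client = Client(listener.address)
    server = listener.accept()
    client.send('hello')
    print(server.recv())               # -> hello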
| gpl-2.0 | 5,388,067,962,131,202,000 | 3,002,976,629,355,623,400 | 29.79798 | 79 | 0.677599 | false |
popazerty/obh-gui | lib/python/Plugins/SystemPlugins/SoftwareManager/BackupRestore.py | 17 | 12974 | from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.Console import Console
from Components.ActionMap import ActionMap, NumberActionMap
from Components.Pixmap import Pixmap
from Components.Label import Label
from Components.Sources.StaticText import StaticText
from Components.MenuList import MenuList
from Components.config import getConfigListEntry, configfile, ConfigSelection, ConfigSubsection, ConfigText, ConfigLocations
from Components.config import config
from Components.ConfigList import ConfigList,ConfigListScreen
from Components.FileList import MultiFileSelectList
from Plugins.Plugin import PluginDescriptor
from enigma import eTimer, eEnv, eEPGCache
from Tools.Directories import *
from os import popen, path, makedirs, listdir, access, stat, rename, remove, W_OK, R_OK
from time import gmtime, strftime, localtime
from datetime import date
config.plugins.configurationbackup = ConfigSubsection()
config.plugins.configurationbackup.backuplocation = ConfigText(default = '/media/hdd/', visible_width = 50, fixed_size = False)
config.plugins.configurationbackup.backupdirs = ConfigLocations(default=[eEnv.resolve('${sysconfdir}/enigma2/'), '/etc/network/interfaces', '/etc/wpa_supplicant.conf', '/etc/wpa_supplicant.ath0.conf', '/etc/wpa_supplicant.wlan0.conf', '/etc/resolv.conf', '/etc/default_gw', '/etc/hostname'])
def getBackupPath():
backuppath = config.plugins.configurationbackup.backuplocation.value
if backuppath.endswith('/'):
return backuppath + 'backup'
else:
return backuppath + '/backup'
def getBackupFilename():
return "enigma2settingsbackup.tar.gz"
class BackupScreen(Screen, ConfigListScreen):
skin = """
<screen position="135,144" size="350,310" title="Backup is running" >
<widget name="config" position="10,10" size="330,250" transparent="1" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, runBackup = False):
Screen.__init__(self, session)
self.session = session
self.runBackup = runBackup
self["actions"] = ActionMap(["WizardActions", "DirectionActions"],
{
"ok": self.close,
"back": self.close,
"cancel": self.close,
}, -1)
self.finished_cb = None
self.backuppath = getBackupPath()
self.backupfile = getBackupFilename()
self.fullbackupfilename = self.backuppath + "/" + self.backupfile
self.list = []
ConfigListScreen.__init__(self, self.list)
self.onLayoutFinish.append(self.layoutFinished)
if self.runBackup:
self.onShown.append(self.doBackup)
def layoutFinished(self):
self.setWindowTitle()
def setWindowTitle(self):
self.setTitle(_("Backup is running..."))
def doBackup(self):
configfile.save()
if config.plugins.softwaremanager.epgcache.value:
eEPGCache.getInstance().save()
try:
if (path.exists(self.backuppath) == False):
makedirs(self.backuppath)
self.backupdirs = ' '.join( config.plugins.configurationbackup.backupdirs.value )
if path.exists(self.fullbackupfilename):
dt = str(date.fromtimestamp(stat(self.fullbackupfilename).st_ctime))
self.newfilename = self.backuppath + "/" + dt + '-' + self.backupfile
if path.exists(self.newfilename):
remove(self.newfilename)
rename(self.fullbackupfilename,self.newfilename)
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, Console, title = _("Backup is running..."), cmdlist = ["tar -czvf " + self.fullbackupfilename + " " + self.backupdirs],finishedCallback = self.backupFinishedCB,closeOnSuccess = True)
else:
self.session.open(Console, title = _("Backup is running..."), cmdlist = ["tar -czvf " + self.fullbackupfilename + " " + self.backupdirs],finishedCallback = self.backupFinishedCB, closeOnSuccess = True)
except OSError:
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, MessageBox, _("Sorry, your backup destination is not writeable.\nPlease select a different one."), MessageBox.TYPE_INFO, timeout = 10 )
else:
self.session.openWithCallback(self.backupErrorCB,MessageBox, _("Sorry, your backup destination is not writeable.\nPlease select a different one."), MessageBox.TYPE_INFO, timeout = 10 )
def backupFinishedCB(self,retval = None):
self.close(True)
def backupErrorCB(self,retval = None):
self.close(False)
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.doBackup()
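# Sketch of the rotation rule doBackup() applies above: an existing archive
# is renamed with a date prefix before a new one is written (the expressions
# mirror the method body; standalone use is illustrative only):
#
#   fullname = getBackupPath() + "/" + getBackupFilename()
#   if path.exists(fullname):
#       dt = str(date.fromtimestamp(stat(fullname).st_ctime))
#       rename(fullname, getBackupPath() + "/" + dt + "-" + getBackupFilename())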
class BackupSelection(Screen):
skin = """
<screen name="BackupSelection" position="center,center" size="560,400" title="Select files/folders to backup">
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
<widget name="checkList" position="5,50" size="550,250" transparent="1" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Save"))
self["key_yellow"] = StaticText()
self.selectedFiles = config.plugins.configurationbackup.backupdirs.value
defaultDir = '/'
inhibitDirs = ["/bin", "/boot", "/dev", "/autofs", "/lib", "/proc", "/sbin", "/sys", "/hdd", "/tmp", "/mnt", "/media"]
self.filelist = MultiFileSelectList(self.selectedFiles, defaultDir, inhibitDirs = inhibitDirs )
self["checkList"] = self.filelist
self["actions"] = ActionMap(["DirectionActions", "OkCancelActions", "ShortcutActions"],
{
"cancel": self.exit,
"red": self.exit,
"yellow": self.changeSelectionState,
"green": self.saveSelection,
"ok": self.okClicked,
"left": self.left,
"right": self.right,
"down": self.down,
"up": self.up
}, -1)
if not self.selectionChanged in self["checkList"].onSelectionChanged:
self["checkList"].onSelectionChanged.append(self.selectionChanged)
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
idx = 0
self["checkList"].moveToIndex(idx)
self.setWindowTitle()
self.selectionChanged()
def setWindowTitle(self):
self.setTitle(_("Select files/folders to backup"))
def selectionChanged(self):
current = self["checkList"].getCurrent()[0]
if current[2] is True:
self["key_yellow"].setText(_("Deselect"))
else:
self["key_yellow"].setText(_("Select"))
def up(self):
self["checkList"].up()
def down(self):
self["checkList"].down()
def left(self):
self["checkList"].pageUp()
def right(self):
self["checkList"].pageDown()
def changeSelectionState(self):
self["checkList"].changeSelectionState()
self.selectedFiles = self["checkList"].getSelectedList()
def saveSelection(self):
self.selectedFiles = self["checkList"].getSelectedList()
config.plugins.configurationbackup.backupdirs.value = self.selectedFiles
config.plugins.configurationbackup.backupdirs.save()
config.plugins.configurationbackup.save()
config.save()
self.close(None)
def exit(self):
self.close(None)
def okClicked(self):
if self.filelist.canDescent():
self.filelist.descent()
class RestoreMenu(Screen):
skin = """
<screen name="RestoreMenu" position="center,center" size="560,400" title="Restore backups" >
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
<widget name="filelist" position="5,50" size="550,230" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, plugin_path):
Screen.__init__(self, session)
self.skin_path = plugin_path
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Restore"))
self["key_yellow"] = StaticText(_("Delete"))
self.sel = []
self.val = []
self.entry = False
self.exe = False
self.path = ""
self["actions"] = NumberActionMap(["SetupActions"],
{
"ok": self.KeyOk,
"cancel": self.keyCancel
}, -1)
self["shortcuts"] = ActionMap(["ShortcutActions"],
{
"red": self.keyCancel,
"green": self.KeyOk,
"yellow": self.deleteFile,
})
self.flist = []
self["filelist"] = MenuList(self.flist)
self.fill_list()
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setWindowTitle()
def setWindowTitle(self):
self.setTitle(_("Restore backups"))
def fill_list(self):
self.flist = []
self.path = getBackupPath()
if (path.exists(self.path) == False):
makedirs(self.path)
for file in listdir(self.path):
if (file.endswith(".tar.gz")):
self.flist.append((file))
self.entry = True
self.flist.sort(reverse=True)
self["filelist"].l.setList(self.flist)
def KeyOk(self):
if (self.exe == False) and (self.entry == True):
self.sel = self["filelist"].getCurrent()
if self.sel:
self.val = self.path + "/" + self.sel
self.session.openWithCallback(self.startRestore, MessageBox, _("Are you sure you want to restore\nthe following backup:\n%s\nYour receiver will restart after the backup has been restored!") % (self.sel))
def keyCancel(self):
self.close()
def startRestore(self, ret = False):
if (ret == True):
self.exe = True
self.session.open(Console, title = _("Restoring..."), cmdlist = ["tar -xzvf " + self.path + "/" + self.sel + " -C /", "killall -9 enigma2"])
def deleteFile(self):
if (self.exe == False) and (self.entry == True):
self.sel = self["filelist"].getCurrent()
if self.sel:
self.val = self.path + "/" + self.sel
self.session.openWithCallback(self.startDelete, MessageBox, _("Are you sure you want to delete\nthe following backup:\n") + self.sel)
def startDelete(self, ret = False):
if (ret == True):
self.exe = True
print "removing:",self.val
if (path.exists(self.val) == True):
remove(self.val)
self.exe = False
self.fill_list()
class RestoreScreen(Screen, ConfigListScreen):
skin = """
<screen position="135,144" size="350,310" title="Restore is running..." >
<widget name="config" position="10,10" size="330,250" transparent="1" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, runRestore = False):
Screen.__init__(self, session)
self.session = session
self.runRestore = runRestore
self["actions"] = ActionMap(["WizardActions", "DirectionActions"],
{
"ok": self.close,
"back": self.close,
"cancel": self.close,
}, -1)
self.finished_cb = None
self.backuppath = getBackupPath()
self.backupfile = getBackupFilename()
self.fullbackupfilename = self.backuppath + "/" + self.backupfile
self.list = []
ConfigListScreen.__init__(self, self.list)
self.onLayoutFinish.append(self.layoutFinished)
if self.runRestore:
self.onShown.append(self.doRestore)
def layoutFinished(self):
self.setWindowTitle()
def setWindowTitle(self):
self.setTitle(_("Restoring..."))
def doRestore(self):
if path.exists("/proc/stb/vmpeg/0/dst_width"):
restorecmdlist = ["tar -xzvf " + self.fullbackupfilename + " -C /", "echo 0 > /proc/stb/vmpeg/0/dst_height", "echo 0 > /proc/stb/vmpeg/0/dst_left", "echo 0 > /proc/stb/vmpeg/0/dst_top", "echo 0 > /proc/stb/vmpeg/0/dst_width", "killall -9 enigma2"]
else:
restorecmdlist = ["tar -xzvf " + self.fullbackupfilename + " -C /", "killall -9 enigma2"]
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, Console, title = _("Restoring..."), cmdlist = restorecmdlist)
else:
self.session.open(Console, title = _("Restoring..."), cmdlist = restorecmdlist)
def backupFinishedCB(self,retval = None):
self.close(True)
def backupErrorCB(self,retval = None):
self.close(False)
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.doRestore()
| gpl-2.0 | 306,332,650,458,159,100 | 6,657,239,257,906,901,000 | 37.844311 | 291 | 0.705488 | false |
mlperf/training_results_v0.5 | v0.5.0/google/cloud_v2.512/resnet-tpuv2-512/code/resnet/model/models/official/mnist/dataset.py | 7 | 4116 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""tf.data.Dataset interface to the MNIST dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import os
import shutil
import tempfile
import numpy as np
from six.moves import urllib
import tensorflow as tf
def read32(bytestream):
"""Read 4 bytes from bytestream as an unsigned 32-bit integer."""
dt = np.dtype(np.uint32).newbyteorder('>')
return np.frombuffer(bytestream.read(4), dtype=dt)[0]
def check_image_file_header(filename):
"""Validate that filename corresponds to images for the MNIST dataset."""
with tf.gfile.Open(filename, 'rb') as f:
magic = read32(f)
read32(f) # num_images, unused
rows = read32(f)
cols = read32(f)
if magic != 2051:
raise ValueError('Invalid magic number %d in MNIST file %s' % (magic,
f.name))
if rows != 28 or cols != 28:
raise ValueError(
'Invalid MNIST file %s: Expected 28x28 images, found %dx%d' %
(f.name, rows, cols))
def check_labels_file_header(filename):
"""Validate that filename corresponds to labels for the MNIST dataset."""
with tf.gfile.Open(filename, 'rb') as f:
magic = read32(f)
read32(f) # num_items, unused
if magic != 2049:
raise ValueError('Invalid magic number %d in MNIST file %s' % (magic,
f.name))
def download(directory, filename):
"""Download (and unzip) a file from the MNIST dataset if not already done."""
filepath = os.path.join(directory, filename)
if tf.gfile.Exists(filepath):
return filepath
if not tf.gfile.Exists(directory):
tf.gfile.MakeDirs(directory)
# CVDF mirror of http://yann.lecun.com/exdb/mnist/
url = 'https://storage.googleapis.com/cvdf-datasets/mnist/' + filename + '.gz'
_, zipped_filepath = tempfile.mkstemp(suffix='.gz')
print('Downloading %s to %s' % (url, zipped_filepath))
urllib.request.urlretrieve(url, zipped_filepath)
with gzip.open(zipped_filepath, 'rb') as f_in, \
tf.gfile.Open(filepath, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
os.remove(zipped_filepath)
return filepath
def dataset(directory, images_file, labels_file):
"""Download and parse MNIST dataset."""
images_file = download(directory, images_file)
labels_file = download(directory, labels_file)
check_image_file_header(images_file)
check_labels_file_header(labels_file)
def decode_image(image):
# Normalize from [0, 255] to [0.0, 1.0]
image = tf.decode_raw(image, tf.uint8)
image = tf.cast(image, tf.float32)
image = tf.reshape(image, [784])
return image / 255.0
def decode_label(label):
label = tf.decode_raw(label, tf.uint8) # tf.string -> [tf.uint8]
label = tf.reshape(label, []) # label is a scalar
return tf.to_int32(label)
images = tf.data.FixedLengthRecordDataset(
images_file, 28 * 28, header_bytes=16).map(decode_image)
labels = tf.data.FixedLengthRecordDataset(
labels_file, 1, header_bytes=8).map(decode_label)
return tf.data.Dataset.zip((images, labels))
def train(directory):
"""tf.data.Dataset object for MNIST training data."""
return dataset(directory, 'train-images-idx3-ubyte',
'train-labels-idx1-ubyte')
def test(directory):
"""tf.data.Dataset object for MNIST test data."""
return dataset(directory, 't10k-images-idx3-ubyte', 't10k-labels-idx1-ubyte')
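if __name__ == '__main__':
  # Quick sanity check of the pipeline above (downloads ~11 MB on first use;
  # assumes the TF 1.x graph-mode API used throughout this file).
  ds = train('/tmp/mnist_data').batch(32)
  images, labels = ds.make_one_shot_iterator().get_next()
  with tf.Session() as sess:
    image_batch, label_batch = sess.run([images, labels])
    print(image_batch.shape, label_batch.shape)  # (32, 784) (32,)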
| apache-2.0 | -1,377,635,505,263,773,000 | 3,864,186,881,795,070,500 | 34.179487 | 80 | 0.672498 | false |
chshu/openthread | tools/harness-automation/cases_R140/fed_9_2_13.py | 18 | 1873 | #!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
from autothreadharness.harness_case import HarnessCase
class FED_9_2_13(HarnessCase):
role = HarnessCase.ROLE_FED
case = '9 2 13'
golden_devices_required = 5
def on_dialog(self, dialog, title):
pass
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | 2,818,547,469,963,597,000 | 207,302,375,052,595,600 | 40.622222 | 77 | 0.761345 | false |
educloudalliance/eca-auth-data | authdata/tests/test_datasources.py | 1 | 21342 |
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014-2015 Haltu Oy, http://haltu.fi
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# pylint: disable=locally-disabled, no-member, protected-access
import base64
import mock
import requests
from django.test import TestCase
from django.test import RequestFactory
from django.test import override_settings
from authdata import models
from authdata.datasources.base import ExternalDataSource
import authdata.datasources.dreamschool
import authdata.datasources.ldap_base
import authdata.datasources.oulu
AUTH_EXTERNAL_SOURCES = {
'ldap_test': ['authdata.datasources.ldap_base', 'TestLDAPDataSource', {
'host': 'ldaps://1.2.3.4',
'username': 'uid=foo,ou=Bar,dc=zap,dc=csc,dc=fi',
'password': 'password'
}],
'dreamschool': ['authdata.datasources.dreamschool', 'DreamschoolDataSource', {
'api_url': 'https://foo.fi/api/2/user/',
'username': 'username',
'password': 'password',
}],
}
AUTH_EXTERNAL_ATTRIBUTE_BINDING = {
'ldap_test': 'ldap_test',
'dreamschool': 'dreamschool',
}
AUTH_EXTERNAL_MUNICIPALITY_BINDING = {
'Foo': 'ldap_test',
'Bar': 'dreamschool',
}
AUTHDATA_DREAMSCHOOL_ORG_MAP = {
u'bar': {u'school1': 3, u'äö school': 1},
}
class TestExternalDataSource(TestCase):
def setUp(self):
self.o = ExternalDataSource()
def test_init(self):
self.assertTrue(self.o)
def test_provision_user(self):
obj = self.o
obj.external_source = 'foo'
obj.provision_user(oid='oid', external_id='foo')
self.assertEqual(models.User.objects.filter(username='oid').count(), 1)
self.assertEqual(models.Source.objects.filter(name='local').count(), 1)
self.assertEqual(models.Attribute.objects.count(), 1)
self.assertEqual(models.UserAttribute.objects.count(), 1)
def test_oid(self):
with self.assertRaises(NotImplementedError):
self.o.get_oid(username='foo')
def test_data(self):
with self.assertRaises(NotImplementedError):
self.o.get_data(external_id='foo')
def test_user_data(self):
with self.assertRaises(NotImplementedError):
self.o.get_user_data(request='foo')
@override_settings(AUTH_EXTERNAL_SOURCES=AUTH_EXTERNAL_SOURCES)
@override_settings(AUTH_EXTERNAL_ATTRIBUTE_BINDING=AUTH_EXTERNAL_ATTRIBUTE_BINDING)
@override_settings(AUTH_EXTERNAL_MUNICIPALITY_BINDING=AUTH_EXTERNAL_MUNICIPALITY_BINDING)
@override_settings(AUTHDATA_DREAMSCHOOL_ORG_MAP=AUTHDATA_DREAMSCHOOL_ORG_MAP)
class TestDreamschoolDataSource(TestCase):
def setUp(self):
self.o = authdata.datasources.dreamschool.DreamschoolDataSource(api_url='mock://foo',
username='foo', password='bar')
authdata.datasources.dreamschool.requests = mock.Mock()
authdata.datasources.dreamschool.requests.codes = requests.codes
data = {'objects': [
{'id': 123,
'username': 'user',
'first_name': 'first',
'last_name': 'last',
'roles': [
{
'permissions': [{
'code': authdata.datasources.dreamschool.TEACHER_PERM,
}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}]
}
self.data = data
response_mock = mock.Mock()
response_mock.status_code = requests.codes.ok
response_mock.json.return_value = data
authdata.datasources.dreamschool.requests.get.return_value = response_mock
self.factory = RequestFactory()
def test_init(self):
self.assertTrue(self.o)
def test_oid(self):
oid = self.o.get_oid(username='foo')
self.assertTrue(oid.startswith('MPASSOID'))
self.assertEqual(len(oid), 30)
def test_user_data(self):
d = {'municipality': 'Bar', 'school': 'school1', 'group': 'Group1'}
request = self.factory.get('/foo', d)
data = self.o.get_user_data(request=request)
self.assertEqual(data['count'], 1)
self.assertEqual(data['next'], None)
self.assertEqual(data['previous'], None)
self.assertEqual(data['results'][0]['attributes'], [])
self.assertEqual(data['results'][0]['first_name'], 'first')
self.assertEqual(data['results'][0]['last_name'], 'last')
self.assertEqual(data['results'][0]['username'], 'MPASSOID.ea5f9ca03f6edf5a0409d')
roles = list(data['results'][0]['roles'])
expected_roles = [
{
'school': 'Äö school',
'role': 'teacher',
'group': 'Group1',
'municipality': u'Bar'
},
]
self.assertEqual(roles, expected_roles)
def test_user_data_api_fail(self):
response_mock = mock.Mock()
response_mock.status_code = 500
response_mock.json.return_value = self.data
authdata.datasources.dreamschool.requests.get.return_value = response_mock
d = {'municipality': 'Bar', 'school': 'school1', 'group': 'Group1'}
request = self.factory.get('/foo', d)
data = self.o.get_user_data(request=request)
self.assertEqual(data['count'], 0)
self.assertEqual(data['next'], None)
self.assertEqual(data['previous'], None)
self.assertEqual(data['results'], [])
def test_user_data_api_parse_json_fail(self):
response_mock = mock.Mock()
response_mock.status_code = 200
response_mock.json.side_effect = ValueError('foo')
authdata.datasources.dreamschool.requests.get.return_value = response_mock
d = {'municipality': 'Bar', 'school': 'school1', 'group': 'Group1'}
request = self.factory.get('/foo', d)
data = self.o.get_user_data(request=request)
self.assertEqual(data['count'], 0)
self.assertEqual(data['next'], None)
self.assertEqual(data['previous'], None)
self.assertEqual(data['results'], [])
def test_get_municipality_by_org_id(self):
org_id = 1
municipality = self.o._get_municipality_by_org_id(org_id)
self.assertEqual(municipality, u'Bar')
@override_settings(AUTHDATA_DREAMSCHOOL_ORG_MAP={})
def test_get_municipality_by_org_id_not_in_settings(self):
org_id = 1
municipality = self.o._get_municipality_by_org_id(org_id)
self.assertEqual(municipality, u'')
def test_get_roles_from_userdata_student(self):
userdata = {
'roles': [
{
'permissions': [{'code': 'foo'}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}
roles = list(self.o._get_roles(userdata))
expected_roles = [
{
"school": "Äö school",
"role": "student",
"group": "Group1",
"municipality": u"Bar"
},
]
self.assertEqual(roles, expected_roles)
def test_get_roles_from_userdata_teacher(self):
userdata = {
'roles': [
{
'permissions': [{
'code': authdata.datasources.dreamschool.TEACHER_PERM,
}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}
roles = list(self.o._get_roles(userdata))
expected_roles = [
{
'school': 'Äö school',
'role': 'teacher',
'group': 'Group1',
'municipality': u'Bar'
},
]
self.assertEqual(roles, expected_roles)
def test_get_org_id_not_configured(self):
municipality = ''
school = ''
self.assertFalse(self.o._get_org_id(municipality, school))
def test_get_org_id(self):
municipality = u'Bar'
school = u'äö school'
expected_org_id = 1
org_id = self.o._get_org_id(municipality=municipality, school=school)
self.assertEqual(org_id, expected_org_id)
municipality = u'Foo'
school = u'äö school'
org_id = self.o._get_org_id(municipality=municipality, school=school)
self.assertEqual(org_id, None)
municipality = u'Bar'
school = u'school1'
expected_org_id = 3
org_id = self.o._get_org_id(municipality=municipality, school=school)
self.assertEqual(org_id, expected_org_id)
def test_get_data(self):
external_id = '123'
data = {
'id': 123,
'username': 'User',
'first_name': 'First',
'last_name': 'Last',
'roles': [
{
'permissions': [{
'code': authdata.datasources.dreamschool.TEACHER_PERM,
}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}
response_mock = mock.Mock()
response_mock.status_code = requests.codes.ok
response_mock.json.return_value = data
authdata.datasources.dreamschool.requests.get.return_value = response_mock
data = self.o.get_data(external_id=external_id)
data['roles'] = list(data['roles'])
expected_data = {
'attributes': [],
'username': 'MPASSOID.08153889bda7b8ffd5a4d',
'first_name': 'First',
'last_name': 'Last',
'roles': [{
'school': 'Äö school',
'role': 'teacher',
'group': 'Group1',
'municipality': u'Bar'
}],
}
self.assertEqual(data, expected_data)
def test_get_data_api_fail(self):
external_id = '123'
data = {
'id': 123,
'username': 'User',
'first_name': 'First',
'last_name': 'Last',
'roles': [
{
'permissions': [{
'code': authdata.datasources.dreamschool.TEACHER_PERM,
}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}
response_mock = mock.Mock()
response_mock.status_code = 500
response_mock.json.return_value = data
authdata.datasources.dreamschool.requests.get.return_value = response_mock
data = self.o.get_data(external_id=external_id)
self.assertEqual(data, None)
def test_get_data_json_parse_fail(self):
external_id = '123'
data = {
'id': 123,
'username': 'User',
'first_name': 'First',
'last_name': 'Last',
'roles': [
{
'permissions': [{
'code': authdata.datasources.dreamschool.TEACHER_PERM,
}],
'organisation': {'id': 1},
},
],
'user_groups': [
{
'organisation': {
'id': 1,
'title': 'Äö school',
},
'title': 'Group1',
},
],
}
response_mock = mock.Mock()
response_mock.status_code = 200
response_mock.json.side_effect = ValueError('foo')
authdata.datasources.dreamschool.requests.get.return_value = response_mock
data = self.o.get_data(external_id=external_id)
self.assertEqual(data, None)
class TestLDAPDataSource(TestCase):
def setUp(self):
self.obj = authdata.datasources.ldap_base.LDAPDataSource(host='host',
username='foo', password='bar', external_source='foo')
self.obj.ldap = mock.Mock()
def test_init(self):
self.assertTrue(self.obj)
self.assertEqual(self.obj.external_source, 'foo')
def test_connect(self):
self.obj.connect()
def test_query(self):
self.obj.query(query_filter=None)
def test_get_municipality_id(self):
muni_id = self.obj.get_municipality_id(name='foo')
self.assertEqual(muni_id, 'foo')
self.obj.municipality_id_map = {'a': '123'}
muni_id = self.obj.get_municipality_id(name='a')
self.assertEqual(muni_id, '123')
def test_get_school_id(self):
muni_id = self.obj.get_school_id(name='foo')
self.assertEqual(muni_id, 'foo')
self.obj.school_id_map = {'a': '123'}
muni_id = self.obj.get_school_id(name='a')
self.assertEqual(muni_id, '123')
class TestLdapTest(TestCase):
def setUp(self):
self.obj = authdata.datasources.ldap_base.TestLDAPDataSource(host='host',
username='foo', password='bar', external_source='foo')
self.obj.ldap = mock.Mock()
def test_init(self):
self.assertTrue(self.obj)
self.assertEqual(self.obj.external_source, 'foo')
def test_school_id_map(self):
name = u'Ääkkös abc 123'
mapper = self.obj.school_id_map()
self.assertEqual('00123', mapper.get(name))
def test_oid(self):
username = 'abc-123'
expected_oid = 'MPASSOID.c5af545a6479eb503ce5d'
oid = self.obj.get_oid(username)
self.assertEqual(oid, expected_oid)
self.assertEqual(len(oid), 30)
def test_get_data_index_error(self):
with mock.patch.object(self.obj, 'query') as mock_query:
mock_query.side_effect = IndexError('foo')
data = self.obj.get_data(external_id=123)
self.assertEqual(data, None)
def test_get_data(self):
self.assertFalse(authdata.models.User.objects.count())
r = [(
'cn=bar,ou=Opettajat,ou=People,ou=LdapKoulu1,ou=KuntaYksi,dc=mpass-test,dc=csc,dc=fi',
{'cn': ['bar'],
'givenName': ['First'],
'mail': ['bar@mpass-test.invalid'],
'objectClass': ['top', 'inetOrgPerson'],
'sn': ['Opettaja10013'],
'title': ['Opettaja'],
'uid': ['bar'],
'userPassword': ['foo'],
'departmentNumber': ['Group1'],
}
)]
with mock.patch.object(self.obj, 'query', return_value=r):
query_result = self.obj.get_data(external_id=123)
expected_data = {
'username': 'MPASSOID.c38029f36d3aebd850cfb',
'last_name': 'Opettaja10013',
'first_name': 'First',
'roles': [
{
'group': 'Group1',
'municipality': 'KuntaYksi',
'role': 'Opettaja',
'school': 'LdapKoulu1',
}],
'attributes': [],
}
self.assertEqual(query_result, expected_data)
# User is provisioned
self.assertEquals(authdata.models.User.objects.count(), 1)
def test_get_user_data(self):
self.assertFalse(authdata.models.User.objects.count())
r = [(
'cn=bar,ou=Opettajat,ou=People,ou=LdapKoulu1,ou=KuntaYksi,dc=mpass-test,dc=csc,dc=fi',
{'cn': ['bar'],
'givenName': ['First'],
'mail': ['bar@mpass-test.invalid'],
'objectClass': ['top', 'inetOrgPerson'],
'sn': ['Opettaja10013'],
'title': ['Opettaja'],
'uid': ['bar'],
'userPassword': ['foo'],
'departmentNumber': ['Group1'],
}
)]
mock_request = mock.Mock()
mock_request.GET = {'school': u'Ääkkösschool', 'group': u'Ääkköskoulu'}
with mock.patch.object(self.obj, 'query', return_value=r):
query_result = self.obj.get_user_data(request=mock_request)
expected_data = {
'count': 1,
'next': None,
'previous': None,
'results': [{'attributes': [],
'first_name': 'First',
'last_name': 'Opettaja10013',
'roles': [{'group': 'Group1',
'municipality': '1234567-8',
'role': 'Opettaja',
'school': '00001'}],
'username': 'MPASSOID.c38029f36d3aebd850cfb'}]
}
self.assertEqual(query_result, expected_data)
# User is provisioned
self.assertEquals(authdata.models.User.objects.count(), 1)
class TestOuluLDAPDataSource(TestCase):
def setUp(self):
self.obj = authdata.datasources.oulu.OuluLDAPDataSource(base_dn='base',
host='host', username='foo', password='bar', external_source='foo')
self.obj.ldap = mock.Mock()
self.q_results = [(
'cn=bar,ou=Opettajat,ou=People,ou=LdapKoulu1,ou=KuntaYksi,dc=mpass-test,dc=csc,dc=fi',
{'cn': ['bar'],
'givenName': ['First'],
'mail': ['bar@mpass-test.invalid'],
'objectClass': ['top', 'inetOrgPerson'],
'sn': ['Last'],
'title': ['Opettaja'],
'uid': ['uid1'],
'userPassword': ['password1'],
'department': ['Group1'],
'objectGUID': ['username1'],
'physicalDeliveryOfficeName': ['School1'],
}
)]
def test_init(self):
self.assertTrue(self.obj)
self.assertEqual(self.obj.external_source, 'foo')
def test_school_id_map(self):
self.assertEqual(self.obj.school_id_map.get(u'Ääkkös koulu 123'), None)
self.assertEqual(self.obj.school_id_map.get(u'Herukan koulu'), '06347')
def test_connect(self):
self.obj.connect()
def test_oid(self):
username = 'abc-123'
expected_oid = 'MPASSOID.1a1786a2133f1751de913'
oid = self.obj.get_oid(username)
self.assertEqual(oid, expected_oid)
self.assertEqual(len(oid), 30)
def test_external_id(self):
query_result = ('foo', {})
with self.assertRaises(KeyError):
self.obj.get_external_id(query_result)
result = self.obj.get_external_id(query_result=self.q_results[0])
self.assertEqual(result, 'uid1')
def test_username(self):
result = self.obj.get_username(query_result=self.q_results[0])
self.assertEqual(result, 'username1')
def test_first_name(self):
result = self.obj.get_first_name(query_result=self.q_results[0])
self.assertEqual(result, 'First')
def test_last_name(self):
result = self.obj.get_last_name(query_result=self.q_results[0])
self.assertEqual(result, 'Last')
def test_get_municipality(self):
result = self.obj.get_municipality()
self.assertEqual(result, 'Oulu')
def test_school(self):
result = self.obj.get_school(query_result=self.q_results[0])
self.assertEqual(result, 'School1')
def test_role(self):
result = self.obj.get_role(query_result=self.q_results[0])
self.assertEqual(result, 'Opettaja')
def test_group(self):
result = self.obj.get_group(query_result=self.q_results[0])
self.assertEqual(result, 'Group1')
def test_get_data_index_error(self):
username = base64.b64encode('username1')
with mock.patch.object(self.obj, 'query') as mock_query:
mock_query.side_effect = IndexError('foo')
data = self.obj.get_data(external_id=username)
self.assertEqual(data, None)
def test_get_data(self):
self.assertFalse(authdata.models.User.objects.count())
username = base64.b64encode('username1')
with mock.patch.object(self.obj, 'query', return_value=self.q_results):
query_result = self.obj.get_data(external_id=username)
expected_data = {
'username': 'MPASSOID.b51110b8d091b6792abde',
'last_name': 'Last',
'first_name': 'First',
'roles': [
{
'group': 'Group1',
'municipality': '0187690-1',
'role': 'Opettaja',
'school': 'School1',
}],
'attributes': [],
}
self.assertEqual(query_result, expected_data)
# User is provisioned
self.assertEquals(authdata.models.User.objects.count(), 1)
def test_get_user_data(self):
self.assertFalse(authdata.models.User.objects.count())
mock_request = mock.Mock()
mock_request.GET = {'school': u'Ääkkösschool', 'group': u'Ääkköskoulu'}
with mock.patch.object(self.obj, 'query', return_value=self.q_results):
query_result = self.obj.get_user_data(request=mock_request)
expected_data = {
'count': 1,
'next': None,
'previous': None,
'results': [
{'username': 'MPASSOID.b51110b8d091b6792abde',
'last_name': 'Last',
'first_name': 'First',
'roles': [
{
'group': 'Group1',
'municipality': '0187690-1',
'role': 'Opettaja',
'school': 'School1',
}],
'attributes': [],
}
]
}
self.assertEqual(query_result, expected_data)
# User is provisioned
self.assertEquals(authdata.models.User.objects.count(), 1)
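# Illustrative direct use of a source outside the test harness (credentials
# are hypothetical; this mirrors what the mocks above simulate):
#
#   src = authdata.datasources.ldap_base.TestLDAPDataSource(
#       host='ldaps://1.2.3.4', username='uid=foo,ou=Bar,dc=zap,dc=csc,dc=fi',
#       password='password', external_source='ldap_test')
#   src.get_oid('abc-123')  # -> 'MPASSOID.c5af545a6479eb503ce5d' (30 chars)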
# vim: tabstop=2 expandtab shiftwidth=2 softtabstop=2
| mit | -370,593,344,231,204,350 | -2,780,202,390,754,837,500 | 29.512894 | 94 | 0.59785 | false |
springmerchant/pybbm | pybb/admin.py | 7 | 4809 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from django.core.urlresolvers import reverse
from pybb.models import Category, Forum, Topic, Post, Profile, Attachment, PollAnswer
from pybb import compat, util
username_field = compat.get_username_field()
class ForumInlineAdmin(admin.TabularInline):
model = Forum
fields = ['name', 'hidden', 'position']
extra = 0
class CategoryAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
list_display = ['name', 'position', 'hidden', 'forum_count']
list_per_page = 20
ordering = ['position']
search_fields = ['name']
list_editable = ['position']
inlines = [ForumInlineAdmin]
class ForumAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
list_display = ['name', 'category', 'hidden', 'position', 'topic_count', ]
list_per_page = 20
raw_id_fields = ['moderators']
ordering = ['-category']
search_fields = ['name', 'category__name']
list_editable = ['position', 'hidden']
fieldsets = (
(None, {
'fields': ('category', 'parent', 'name', 'hidden', 'position', )
}
),
(_('Additional options'), {
'classes': ('collapse',),
'fields': ('updated', 'description', 'headline', 'post_count', 'moderators', 'slug')
}
),
)
class PollAnswerAdmin(admin.TabularInline):
model = PollAnswer
fields = ['text', ]
extra = 0
class TopicAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
list_display = ['name', 'forum', 'created', 'head', 'post_count', 'poll_type',]
list_per_page = 20
raw_id_fields = ['user', 'subscribers']
ordering = ['-created']
date_hierarchy = 'created'
search_fields = ['name']
fieldsets = (
(None, {
'fields': ('forum', 'name', 'user', ('created', 'updated'), 'poll_type',)
}
),
(_('Additional options'), {
'classes': ('collapse',),
'fields': (('views', 'post_count'), ('sticky', 'closed'), 'subscribers', 'slug')
}
),
)
inlines = [PollAnswerAdmin, ]
class TopicReadTrackerAdmin(admin.ModelAdmin):
list_display = ['topic', 'user', 'time_stamp']
search_fields = ['user__%s' % username_field]
class ForumReadTrackerAdmin(admin.ModelAdmin):
list_display = ['forum', 'user', 'time_stamp']
search_fields = ['user__%s' % username_field]
class PostAdmin(admin.ModelAdmin):
list_display = ['topic', 'user', 'created', 'updated', 'summary']
list_per_page = 20
raw_id_fields = ['user', 'topic']
ordering = ['-created']
date_hierarchy = 'created'
search_fields = ['body']
fieldsets = (
(None, {
'fields': ('topic', 'user')
}
),
(_('Additional options'), {
'classes': ('collapse',),
'fields' : (('created', 'updated'), 'user_ip')
}
),
(_('Message'), {
'fields': ('body', 'body_html', 'body_text')
}
),
)
class ProfileAdmin(admin.ModelAdmin):
list_display = ['user', 'time_zone', 'language', 'post_count']
list_per_page = 20
ordering = ['-user']
search_fields = ['user__%s' % username_field]
fieldsets = (
(None, {
'fields': ('time_zone', 'language')
}
),
(_('Additional options'), {
'classes': ('collapse',),
'fields' : ('avatar', 'signature', 'show_signatures')
}
),
)
class AttachmentAdmin(admin.ModelAdmin):
list_display = ['file', 'size', 'admin_view_post', 'admin_edit_post']
def admin_view_post(self, obj):
return '<a href="%s">view</a>' % obj.post.get_absolute_url()
admin_view_post.allow_tags = True
admin_view_post.short_description = _('View post')
def admin_edit_post(self, obj):
return '<a href="%s">edit</a>' % reverse('admin:pybb_post_change', args=[obj.post.pk])
admin_edit_post.allow_tags = True
admin_edit_post.short_description = _('Edit post')
admin.site.register(Category, CategoryAdmin)
admin.site.register(Forum, ForumAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(Post, PostAdmin)
admin.site.register(Attachment, AttachmentAdmin)
if util.get_pybb_profile_model() == Profile:
admin.site.register(Profile, ProfileAdmin)
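# Sketch: when a project supplies its own profile model, no ProfileAdmin is
# registered by the conditional above; re-registering a customized admin is
# then straightforward (the model import below is hypothetical):
#
#   from myproject.models import CustomProfile
#
#   class CustomProfileAdmin(ProfileAdmin):
#       list_display = ProfileAdmin.list_display + ['signature']
#
#   admin.site.register(CustomProfile, CustomProfileAdmin)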
# This can be used to debug read/unread trackers
#admin.site.register(TopicReadTracker, TopicReadTrackerAdmin)
#admin.site.register(ForumReadTracker, ForumReadTrackerAdmin) | bsd-2-clause | 3,344,626,550,395,060,700 | 7,234,368,197,268,465,000 | 30.233766 | 100 | 0.571013 | false |
sergeLabo/asyncio-osc | irc/schedule.py | 22 | 3089 | """
Classes for calling functions a schedule.
"""
import datetime
import numbers
class DelayedCommand(datetime.datetime):
"""
A command to be executed after some delay (seconds or timedelta).
Clients may override .now() to have dates interpreted in a different
manner, such as to use UTC or to have timezone-aware times.
"""
@classmethod
    def now(cls, tzinfo=None):
return datetime.datetime.now(tzinfo)
@classmethod
def from_datetime(cls, other):
return cls(other.year, other.month, other.day, other.hour,
other.minute, other.second, other.microsecond,
other.tzinfo)
@classmethod
def after(cls, delay, function):
if not isinstance(delay, datetime.timedelta):
delay = datetime.timedelta(seconds=delay)
due_time = cls.now() + delay
cmd = cls.from_datetime(due_time)
cmd.delay = delay
cmd.function = function
return cmd
@classmethod
def at_time(cls, at, function):
"""
Construct a DelayedCommand to come due at `at`, where `at` may be
a datetime or timestamp. If `at` is a real number, it will be
interpreted as a naive local timestamp.
"""
if isinstance(at, numbers.Real):
at = datetime.datetime.fromtimestamp(at)
cmd = cls.from_datetime(at)
cmd.delay = at - cmd.now()
cmd.function = function
return cmd
def due(self):
return self.now() >= self
class PeriodicCommand(DelayedCommand):
"""
Like a delayed command, but expect this command to run every delay
seconds.
"""
def next(self):
cmd = self.__class__.from_datetime(self + self.delay)
cmd.delay = self.delay
cmd.function = self.function
return cmd
def __setattr__(self, key, value):
if key == 'delay' and not value > datetime.timedelta():
raise ValueError("A PeriodicCommand must have a positive, "
"non-zero delay.")
super(PeriodicCommand, self).__setattr__(key, value)
class PeriodicCommandFixedDelay(PeriodicCommand):
"""
Like a periodic command, but don't calculate the delay based on
the current time. Instead use a fixed delay following the initial
run.
"""
@classmethod
def at_time(cls, at, delay, function):
if isinstance(at, int):
at = datetime.datetime.fromtimestamp(at)
cmd = cls.from_datetime(at)
if not isinstance(delay, datetime.timedelta):
delay = datetime.timedelta(seconds=delay)
cmd.delay = delay
cmd.function = function
return cmd
@classmethod
def daily_at(cls, at, function):
"""
Schedule a command to run at a specific time each day.
"""
daily = datetime.timedelta(days=1)
# convert when to the next datetime matching this time
when = datetime.datetime.combine(datetime.date.today(), at)
if when < cls.now():
when += daily
return cls.at_time(when, daily, function)
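if __name__ == '__main__':
    # Minimal, illustrative exercise of the primitives above (not part of
    # the library itself).
    import time

    def fire():
        print('fired')

    cmd = DelayedCommand.after(0.1, fire)
    while not cmd.due():
        time.sleep(0.01)
    cmd.function()

    # Schedule a daily run at 09:00 local time; only the due time is shown.
    daily = PeriodicCommandFixedDelay.daily_at(datetime.time(9, 0), fire)
    print('next daily run due at', daily)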
| gpl-2.0 | -6,307,110,219,182,298,000 | -7,022,046,957,011,461,000 | 30.845361 | 73 | 0.618323 | false |
sunlianqiang/kbengine | kbe/src/lib/python/Lib/re.py | 206 | 15262 | #
# Secret Labs' Regular Expression Engine
#
# re-compatible interface for the sre matching engine
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# This version of the SRE library can be redistributed under CNRI's
# Python 1.6 license. For any other use, please contact Secret Labs
# AB (info@pythonware.com).
#
# Portions of this engine have been developed in cooperation with
# CNRI. Hewlett-Packard provided funding for 1.6 integration and
# other compatibility work.
#
r"""Support for regular expressions (RE).
This module provides regular expression matching operations similar to
those found in Perl. It supports both 8-bit and Unicode strings; both
the pattern and the strings being processed can contain null bytes and
characters outside the US ASCII range.
Regular expressions can contain both special and ordinary characters.
Most ordinary characters, like "A", "a", or "0", are the simplest
regular expressions; they simply match themselves. You can
concatenate ordinary characters, so last matches the string 'last'.
The special characters are:
"." Matches any character except a newline.
"^" Matches the start of the string.
"$" Matches the end of the string or just before the newline at
the end of the string.
"*" Matches 0 or more (greedy) repetitions of the preceding RE.
Greedy means that it will match as many repetitions as possible.
"+" Matches 1 or more (greedy) repetitions of the preceding RE.
"?" Matches 0 or 1 (greedy) of the preceding RE.
*?,+?,?? Non-greedy versions of the previous three special characters.
{m,n} Matches from m to n repetitions of the preceding RE.
{m,n}? Non-greedy version of the above.
"\\" Either escapes special characters or signals a special sequence.
[] Indicates a set of characters.
A "^" as the first character indicates a complementing set.
"|" A|B, creates an RE that will match either A or B.
(...) Matches the RE inside the parentheses.
The contents can be retrieved or matched later in the string.
(?aiLmsux) Set the A, I, L, M, S, U, or X flag for the RE (see below).
(?:...) Non-grouping version of regular parentheses.
(?P<name>...) The substring matched by the group is accessible by name.
(?P=name) Matches the text matched earlier by the group named name.
(?#...) A comment; ignored.
(?=...) Matches if ... matches next, but doesn't consume the string.
(?!...) Matches if ... doesn't match next.
(?<=...) Matches if preceded by ... (must be fixed length).
(?<!...) Matches if not preceded by ... (must be fixed length).
(?(id/name)yes|no) Matches yes pattern if the group with id/name matched,
the (optional) no pattern otherwise.
The special sequences consist of "\\" and a character from the list
below. If the ordinary character is not on the list, then the
resulting RE will match the second character.
\number Matches the contents of the group of the same number.
\A Matches only at the start of the string.
\Z Matches only at the end of the string.
\b Matches the empty string, but only at the start or end of a word.
\B Matches the empty string, but not at the start or end of a word.
\d Matches any decimal digit; equivalent to the set [0-9] in
bytes patterns or string patterns with the ASCII flag.
In string patterns without the ASCII flag, it will match the whole
range of Unicode digits.
\D Matches any non-digit character; equivalent to [^\d].
\s Matches any whitespace character; equivalent to [ \t\n\r\f\v] in
bytes patterns or string patterns with the ASCII flag.
In string patterns without the ASCII flag, it will match the whole
range of Unicode whitespace characters.
\S Matches any non-whitespace character; equivalent to [^\s].
\w Matches any alphanumeric character; equivalent to [a-zA-Z0-9_]
in bytes patterns or string patterns with the ASCII flag.
In string patterns without the ASCII flag, it will match the
range of Unicode alphanumeric characters (letters plus digits
plus underscore).
With LOCALE, it will match the set [0-9_] plus characters defined
as letters for the current locale.
\W Matches the complement of \w.
\\ Matches a literal backslash.
This module exports the following functions:
match Match a regular expression pattern to the beginning of a string.
fullmatch Match a regular expression pattern to all of a string.
search Search a string for the presence of a pattern.
sub Substitute occurrences of a pattern found in a string.
subn Same as sub, but also return the number of substitutions made.
split Split a string by the occurrences of a pattern.
findall Find all occurrences of a pattern in a string.
finditer Return an iterator yielding a match object for each match.
compile Compile a pattern into a RegexObject.
purge Clear the regular expression cache.
escape Backslash all non-alphanumerics in a string.
Some of the functions in this module takes flags as optional parameters:
A ASCII For string patterns, make \w, \W, \b, \B, \d, \D
match the corresponding ASCII character categories
(rather than the whole Unicode categories, which is the
default).
For bytes patterns, this flag is the only available
behaviour and needn't be specified.
I IGNORECASE Perform case-insensitive matching.
L LOCALE Make \w, \W, \b, \B, dependent on the current locale.
M MULTILINE "^" matches the beginning of lines (after a newline)
as well as the string.
"$" matches the end of lines (before a newline) as well
as the end of the string.
S DOTALL "." matches any character at all, including the newline.
X VERBOSE Ignore whitespace and comments for nicer looking RE's.
U UNICODE For compatibility only. Ignored for string patterns (it
is the default), and forbidden for bytes patterns.
This module also defines an exception 'error'.
"""
import sys
import sre_compile
import sre_parse
# public symbols
__all__ = [ "match", "fullmatch", "search", "sub", "subn", "split", "findall",
"compile", "purge", "template", "escape", "A", "I", "L", "M", "S", "X",
"U", "ASCII", "IGNORECASE", "LOCALE", "MULTILINE", "DOTALL", "VERBOSE",
"UNICODE", "error" ]
__version__ = "2.2.1"
# flags
A = ASCII = sre_compile.SRE_FLAG_ASCII # assume ascii "locale"
I = IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE # ignore case
L = LOCALE = sre_compile.SRE_FLAG_LOCALE # assume current 8-bit locale
U = UNICODE = sre_compile.SRE_FLAG_UNICODE # assume unicode "locale"
M = MULTILINE = sre_compile.SRE_FLAG_MULTILINE # make anchors look for newline
S = DOTALL = sre_compile.SRE_FLAG_DOTALL # make dot match newline
X = VERBOSE = sre_compile.SRE_FLAG_VERBOSE # ignore whitespace and comments
# sre extensions (experimental, don't rely on these)
T = TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE # disable backtracking
DEBUG = sre_compile.SRE_FLAG_DEBUG # dump pattern after compilation
# sre exception
error = sre_compile.error
# --------------------------------------------------------------------
# public interface
def match(pattern, string, flags=0):
"""Try to apply the pattern at the start of the string, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).match(string)
def fullmatch(pattern, string, flags=0):
"""Try to apply the pattern to all of the string, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).fullmatch(string)
def search(pattern, string, flags=0):
"""Scan through string looking for a match to the pattern, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).search(string)
def sub(pattern, repl, string, count=0, flags=0):
"""Return the string obtained by replacing the leftmost
non-overlapping occurrences of the pattern in string by the
replacement repl. repl can be either a string or a callable;
if a string, backslash escapes in it are processed. If it is
a callable, it's passed the match object and must return
a replacement string to be used."""
return _compile(pattern, flags).sub(repl, string, count)
def subn(pattern, repl, string, count=0, flags=0):
"""Return a 2-tuple containing (new_string, number).
new_string is the string obtained by replacing the leftmost
non-overlapping occurrences of the pattern in the source
string by the replacement repl. number is the number of
substitutions that were made. repl can be either a string or a
callable; if a string, backslash escapes in it are processed.
If it is a callable, it's passed the match object and must
return a replacement string to be used."""
return _compile(pattern, flags).subn(repl, string, count)
def split(pattern, string, maxsplit=0, flags=0):
"""Split the source string by the occurrences of the pattern,
returning a list containing the resulting substrings. If
capturing parentheses are used in pattern, then the text of all
groups in the pattern are also returned as part of the resulting
list. If maxsplit is nonzero, at most maxsplit splits occur,
and the remainder of the string is returned as the final element
of the list."""
return _compile(pattern, flags).split(string, maxsplit)
def findall(pattern, string, flags=0):
"""Return a list of all non-overlapping matches in the string.
If one or more capturing groups are present in the pattern, return
a list of groups; this will be a list of tuples if the pattern
has more than one group.
Empty matches are included in the result."""
return _compile(pattern, flags).findall(string)
if sys.hexversion >= 0x02020000:
__all__.append("finditer")
def finditer(pattern, string, flags=0):
"""Return an iterator over all non-overlapping matches in the
string. For each match, the iterator returns a match object.
Empty matches are included in the result."""
return _compile(pattern, flags).finditer(string)
def compile(pattern, flags=0):
"Compile a regular expression pattern, returning a pattern object."
return _compile(pattern, flags)
def purge():
"Clear the regular expression caches"
_cache.clear()
_cache_repl.clear()
def template(pattern, flags=0):
"Compile a template pattern, returning a pattern object"
return _compile(pattern, flags|T)
_alphanum_str = frozenset(
"_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
_alphanum_bytes = frozenset(
b"_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890")
def escape(pattern):
"""
Escape all the characters in pattern except ASCII letters, numbers and '_'.
"""
if isinstance(pattern, str):
alphanum = _alphanum_str
s = list(pattern)
for i, c in enumerate(pattern):
if c not in alphanum:
if c == "\000":
s[i] = "\\000"
else:
s[i] = "\\" + c
return "".join(s)
else:
alphanum = _alphanum_bytes
s = []
esc = ord(b"\\")
for c in pattern:
if c in alphanum:
s.append(c)
else:
if c == 0:
s.extend(b"\\000")
else:
s.append(esc)
s.append(c)
return bytes(s)
# --------------------------------------------------------------------
# internals
_cache = {}
_cache_repl = {}
_pattern_type = type(sre_compile.compile("", 0))
_MAXCACHE = 512
def _compile(pattern, flags):
# internal: compile pattern
bypass_cache = flags & DEBUG
if not bypass_cache:
try:
return _cache[type(pattern), pattern, flags]
except KeyError:
pass
if isinstance(pattern, _pattern_type):
if flags:
raise ValueError(
"Cannot process flags argument with a compiled pattern")
return pattern
if not sre_compile.isstring(pattern):
raise TypeError("first argument must be string or compiled pattern")
p = sre_compile.compile(pattern, flags)
if not bypass_cache:
if len(_cache) >= _MAXCACHE:
_cache.clear()
_cache[type(pattern), pattern, flags] = p
return p
def _compile_repl(repl, pattern):
# internal: compile replacement pattern
try:
return _cache_repl[repl, pattern]
except KeyError:
pass
p = sre_parse.parse_template(repl, pattern)
if len(_cache_repl) >= _MAXCACHE:
_cache_repl.clear()
_cache_repl[repl, pattern] = p
return p
def _expand(pattern, match, template):
# internal: match.expand implementation hook
template = sre_parse.parse_template(template, pattern)
return sre_parse.expand_template(template, match)
def _subx(pattern, template):
# internal: pattern.sub/subn implementation helper
template = _compile_repl(template, pattern)
if not template[0] and len(template[1]) == 1:
# literal replacement
return template[1][0]
def filter(match, template=template):
return sre_parse.expand_template(template, match)
return filter
# register myself for pickling
import copyreg
def _pickle(p):
return _compile, (p.pattern, p.flags)
copyreg.pickle(_pattern_type, _pickle, _compile)
# --------------------------------------------------------------------
# experimental stuff (see python-dev discussions for details)
class Scanner:
def __init__(self, lexicon, flags=0):
from sre_constants import BRANCH, SUBPATTERN
self.lexicon = lexicon
# combine phrases into a compound pattern
p = []
s = sre_parse.Pattern()
s.flags = flags
for phrase, action in lexicon:
p.append(sre_parse.SubPattern(s, [
(SUBPATTERN, (len(p)+1, sre_parse.parse(phrase, flags))),
]))
s.groups = len(p)+1
p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
self.scanner = sre_compile.compile(p)
def scan(self, string):
result = []
append = result.append
match = self.scanner.scanner(string).match
i = 0
while 1:
m = match()
if not m:
break
j = m.end()
if i == j:
break
action = self.lexicon[m.lastindex-1][1]
if callable(action):
self.match = m
action = action(self, m.group())
if action is not None:
append(action)
i = j
return result, string[i:]
| lgpl-3.0 | 7,020,326,794,137,342,000 | 6,096,421,245,984,183,000 | 40.472826 | 79 | 0.641528 | false |
2013Commons/HUE-SHARK | desktop/core/ext-py/django_nose/build/lib.linux-i686-2.7/django_nose/nose_runner.py | 4 | 2660 | """
Django test runner that invokes nose.
Usage:
./manage.py test DJANGO_ARGS -- NOSE_ARGS
The 'test' argument, and any other args before '--', will not be passed
to nose, allowing django args and nose args to coexist.
You can use
NOSE_ARGS = ['list', 'of', 'args']
in settings.py for arguments that you always want passed to nose.
"""
import sys
from django.conf import settings
from django.db import connection
from django.test import utils
import nose
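# Editor's note: an illustrative configuration sketch, not part of the
# original module. The TEST_RUNNER dotted path below is an assumption based
# on this module's location; adjust it to your project layout.
#
#     # settings.py
#     TEST_RUNNER = 'django_nose.nose_runner.run_tests'   # hypothetical path
#     NOSE_ARGS = ['--with-coverage']
#
#     # shell: Django args go before '--', nose args after it
#     ./manage.py test myapp -- -v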
SETUP_ENV = 'setup_test_environment'
TEARDOWN_ENV = 'teardown_test_environment'
def get_test_environment_functions():
"""The functions setup_test_environment and teardown_test_environment in
<appname>.tests modules will be automatically called before and after
running the tests.
"""
setup_funcs = []
teardown_funcs = []
for app_name in settings.INSTALLED_APPS:
mod = __import__(app_name, None, None, ['tests'])
if hasattr(mod, 'tests'):
if hasattr(mod.tests, SETUP_ENV):
setup_funcs.append(getattr(mod.tests, SETUP_ENV))
if hasattr(mod.tests, TEARDOWN_ENV):
teardown_funcs.append(getattr(mod.tests, TEARDOWN_ENV))
return setup_funcs, teardown_funcs
def setup_test_environment(setup_funcs):
utils.setup_test_environment()
for func in setup_funcs:
func()
def teardown_test_environment(teardown_funcs):
utils.teardown_test_environment()
for func in teardown_funcs:
func()
def run_tests_explicit(nose_args, verbosity=1, interactive=True):
"""Setup django and run nose with given arguments."""
    setup_funcs, teardown_funcs = get_test_environment_functions()
# Prepare django for testing.
setup_test_environment(setup_funcs)
old_db_name = settings.DATABASE_NAME
connection.creation.create_test_db(verbosity, autoclobber=not interactive)
# Pretend it's a production environment.
settings.DEBUG = False
ret = nose.run(argv=nose_args)
# Clean up django.
connection.creation.destroy_test_db(old_db_name, verbosity)
teardown_test_environment(teardown_funcs)
return ret
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
"""Calculates nose arguments and runs tests."""
nose_argv = ['nosetests']
if hasattr(settings, 'NOSE_ARGS'):
nose_argv.extend(settings.NOSE_ARGS)
# Everything after '--' is passed to nose.
if '--' in sys.argv:
hyphen_pos = sys.argv.index('--')
nose_argv.extend(sys.argv[hyphen_pos + 1:])
if verbosity >= 1:
print ' '.join(nose_argv)
return run_tests_explicit(nose_argv, verbosity, interactive)
| apache-2.0 | -5,254,829,247,542,400,000 | 3,871,414,028,871,798,000 | 28.555556 | 78 | 0.678947 | false |
kotton21/PotteryOnline | server.py | 1 | 2940 | #!/usr/bin/env python
import SimpleHTTPServer
import SocketServer
import PotGenerator
import os
import logging
import logging.handlers
import sys
import time
import rotate3D2
from urlparse import urlparse, parse_qs
try:
    os.chdir('/home/pi/PotteryOnline/')
except OSError:
    print 'executing from current directory'
PORT = 8080
# Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
class MySimpleHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
"""Serve a GET request."""
#generate the pot before the stuff
print "this is the path.............................."
print "path: %s"%self.path
print self.path == "/mythree.js/examples/my_webgl_loader_stl2.html"
if self.path.strip() == "/fig.jpg":
print "new fig request"
polyLimits = (-.1,.1,-.03,.03,-.0001,.0001)
g = PotGenerator.PolyPotGenerator(polyLimits)
print g.numCurves,': ',[round(c,2) for poly in g for c in poly]
g.plot(True)
if self.path == "/mythree.js/examples/my_webgl_loader_stl2.html":
print "new json pot request"
polyLimits = (-.1,.1,-.03,.03,-.0001,.0001)
g = PotGenerator.PolyPotGenerator(polyLimits)
print g.numCurves,': ',[round(c,2) for poly in g for c in poly]
#g.plot(True)
shape = g.zipPoints()
filename = "./mythree.js/examples/models/json/shape3d.json"
rotate3D2.build_3d_shape(shape, 20, filename)
f = self.send_head()
if f:
try:
#parse the query
query_components = parse_qs(urlparse(self.path).query)
#res = query_components["res"]
print 'components %s'%query_components
print urlparse(self.path).query
self.copyfile(f, self.wfile)
finally:
f.close()
#logging from http://blog.scphillips.com/posts/2013/07/getting-a-python-script-to-run-in-the-background-as-a-service-on-boot/
LOG_FILENAME = "./logs/server.log"
LOG_LEVEL = logging.INFO
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)
logHandler = logging.handlers.TimedRotatingFileHandler(LOG_FILENAME, when="midnight", backupCount=7)
formatter = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s')
logHandler.setFormatter(formatter)
logger.addHandler(logHandler)
class MyLogger(object):
def __init__(self, logger, level):
"""Needs a logger and a Logger level."""
self.logger = logger
self.level = level
def write(self, message):
if message.rstrip() != "":
self.logger.log(self.level, message.rstrip())
sys.stdout = MyLogger(logger, logging.INFO)
sys.stderr = MyLogger(logger, logging.ERROR)
#end logging
#usage: logger.info("bla bla") or print "..."
logger.info("starting server")
Handler = MySimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
logger.info("serving at port" + str(PORT))
print "serving at port", PORT
try:
httpd.serve_forever()
except KeyboardInterrupt:
httpd.server_close()
#import daemon
#from spam import do_main_program
#with daemon.DaemonContext():
# httpd.serve_forever()
| mit | 4,431,916,924,521,846,300 | 8,587,244,806,935,464,000 | 27.269231 | 125 | 0.708163 | false |
CloudWareChile/OpenChile | openerp/addons/l10n_fr/l10n_fr.py | 9 | 1966 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
class l10n_fr_report(osv.osv):
_name = 'l10n.fr.report'
_description = 'Report for l10n_fr'
_columns = {
'code': fields.char('Code', size=64),
'name': fields.char('Name', size=128),
'line_ids': fields.one2many('l10n.fr.line', 'report_id', 'Lines'),
}
_sql_constraints = [
('code_uniq', 'unique (code)','The code report must be unique !')
]
l10n_fr_report()
class l10n_fr_line(osv.osv):
_name = 'l10n.fr.line'
_description = 'Report Lines for l10n_fr'
_columns = {
'code': fields.char('Variable Name', size=64),
'definition': fields.char('Definition', size=512),
'name': fields.char('Name', size=256),
'report_id': fields.many2one('l10n.fr.report', 'Report'),
}
_sql_constraints = [
('code_uniq', 'unique (code)', 'The variable name must be unique !')
]
l10n_fr_line()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | agpl-3.0 | -3,188,220,407,255,566,300 | -1,539,973,651,175,473,200 | 35.425926 | 78 | 0.595626 | false |
xiaojunwu/crosswalk-test-suite | webapi/tct-netinfo-w3c-tests/inst.apk.py | 903 | 3180 | #!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARAMETERS = None
ADB_CMD = "adb"
def doCMD(cmd):
    # No need to handle a timeout in this short script; let the tool do it
print "-->> \"%s\"" % cmd
output = []
cmd_return_code = 1
cmd_proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
while True:
output_line = cmd_proc.stdout.readline().strip("\r\n")
cmd_return_code = cmd_proc.poll()
        if output_line == '' and cmd_return_code is not None:
break
sys.stdout.write("%s\n" % output_line)
sys.stdout.flush()
output.append(output_line)
return (cmd_return_code, output)
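# Editor's note: illustrative usage sketch of doCMD (it mirrors the call
# made in main() below); assumes 'adb' is available on PATH.
#
#     return_code, output = doCMD("adb devices")
#     # return_code is the process exit status, output the captured lines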
def uninstPKGs():
action_status = True
for root, dirs, files in os.walk(SCRIPT_DIR):
for file in files:
if file.endswith(".apk"):
cmd = "%s -s %s uninstall org.xwalk.%s" % (
ADB_CMD, PARAMETERS.device, os.path.basename(os.path.splitext(file)[0]))
(return_code, output) = doCMD(cmd)
for line in output:
if "Failure" in line:
action_status = False
break
return action_status
def instPKGs():
action_status = True
for root, dirs, files in os.walk(SCRIPT_DIR):
for file in files:
if file.endswith(".apk"):
cmd = "%s -s %s install %s" % (ADB_CMD,
PARAMETERS.device, os.path.join(root, file))
(return_code, output) = doCMD(cmd)
for line in output:
if "Failure" in line:
action_status = False
break
return action_status
def main():
try:
usage = "usage: inst.py -i"
opts_parser = OptionParser(usage=usage)
opts_parser.add_option(
"-s", dest="device", action="store", help="Specify device")
opts_parser.add_option(
"-i", dest="binstpkg", action="store_true", help="Install package")
opts_parser.add_option(
"-u", dest="buninstpkg", action="store_true", help="Uninstall package")
global PARAMETERS
(PARAMETERS, args) = opts_parser.parse_args()
except Exception, e:
print "Got wrong option: %s, exit ..." % e
sys.exit(1)
if not PARAMETERS.device:
(return_code, output) = doCMD("adb devices")
for line in output:
if str.find(line, "\tdevice") != -1:
PARAMETERS.device = line.split("\t")[0]
break
if not PARAMETERS.device:
print "No device found"
sys.exit(1)
if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
print "-i and -u are conflict"
sys.exit(1)
if PARAMETERS.buninstpkg:
if not uninstPKGs():
sys.exit(1)
else:
if not instPKGs():
sys.exit(1)
if __name__ == "__main__":
main()
sys.exit(0)
| bsd-3-clause | -5,356,460,330,546,938,000 | 4,993,402,353,939,268,000 | 28.719626 | 92 | 0.543396 | false |
smishenk/blink-crosswalk | Tools/Scripts/webkitpy/formatter/main.py | 48 | 4102 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import lib2to3.refactor
from webkitpy.common.system.systemhost import SystemHost
from webkitpy.thirdparty import autopep8
def parse_args(args=None):
parser = argparse.ArgumentParser()
parser.add_argument('--chromium', action='store_const', dest='style', const='chromium', default='blink',
help="Format according to Chromium's Python coding styles instead of Blink's.")
parser.add_argument('--no-backups', action='store_false', default=True, dest='backup',
help='Do not back up files before overwriting them.')
parser.add_argument('-j', '--jobs', metavar='n', type=int, default=0,
help='Number of parallel jobs; match CPU count if less than 1.')
parser.add_argument('files', nargs='*', default=['-'],
help="files to format or '-' for standard in")
parser.add_argument('--double-quote-strings', action='store_const', dest='quoting', const='double', default='single',
help='Rewrite string literals to use double quotes instead of single quotes.')
parser.add_argument('--no-autopep8', action='store_true',
help='Skip the autopep8 code-formatting step.')
parser.add_argument('--leave-strings-alone', action='store_true',
help='Do not reformat string literals to use a consistent quote style.')
return parser.parse_args(args=args)
def main(host=None, args=None):
options = parse_args(args)
if options.no_autopep8:
options.style = None
if options.leave_strings_alone:
options.quoting = None
autopep8_options = _autopep8_options_for_style(options.style)
fixers = _fixers_for_quoting(options.quoting)
if options.files == ['-']:
host = host or SystemHost()
host.print_(reformat_source(host.stdin.read(), autopep8_options, fixers, '<stdin>'), end='')
return
# We create the arglist before checking if we need to create a Host, because a
# real host is non-picklable and can't be passed to host.executive.map().
arglist = [(host, name, autopep8_options, fixers, options.backup) for name in options.files]
host = host or SystemHost()
host.executive.map(_reformat_thunk, arglist, processes=options.jobs)
def _autopep8_options_for_style(style):
return {
None: [],
'blink': autopep8.parse_args(['--aggressive',
'--max-line-length', '132',
'--indent-size', '4',
'']),
'chromium': autopep8.parse_args(['--aggressive',
'--max-line-length', '80',
'--indent-size', '2',
'']),
}.get(style)
def _fixers_for_quoting(quoting):
return {
None: [],
'double': ['webkitpy.formatter.fix_double_quote_strings'],
'single': ['webkitpy.formatter.fix_single_quote_strings'],
}.get(quoting)
def _reformat_thunk(args):
reformat_file(*args)
def reformat_file(host, name, autopep8_options, fixers, should_backup_file):
host = host or SystemHost()
source = host.filesystem.read_text_file(name)
dest = reformat_source(source, autopep8_options, fixers, name)
if dest != source:
if should_backup_file:
host.filesystem.write_text_file(name + '.bak', source)
host.filesystem.write_text_file(name, dest)
def reformat_source(source, autopep8_options, fixers, name):
tmp_str = source
if autopep8_options:
tmp_str = autopep8.fix_code(tmp_str, autopep8_options)
if fixers:
tool = lib2to3.refactor.RefactoringTool(fixer_names=fixers,
explicit=fixers)
tmp_str = unicode(tool.refactor_string(tmp_str, name=name))
return tmp_str
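# Editor's note: an illustrative usage sketch, not part of the original
# module ('foo.py' is a hypothetical input file):
#
#     main(args=['--chromium', 'foo.py'])    # reformat in place, keep a .bak
#     main(args=['--no-backups', '-'])       # filter stdin to stdout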
| bsd-3-clause | -8,198,990,787,835,085,000 | -1,520,202,777,489,293,000 | 38.825243 | 121 | 0.607021 | false |
yv84/pyph | src/tests/integrate_tests/tcp_echo.py | 1 | 5366 | #!/usr/bin/env python3
"""TCP echo server example."""
import argparse
import asyncio
import sys
import os
try:
import signal
except ImportError:
signal = None
from msg_log import Message
from game_log import log
ARGS = argparse.ArgumentParser(description="PyPh test fixtures.")
ARGS.add_argument(
'--server', action="store_true", dest='server',
default=False, help='Run tcp server')
ARGS.add_argument(
'--client', action="store_true", dest='client',
default=False, help='Run tcp client')
ARGS.add_argument(
'--host', action="store", dest='host',
default='127.0.0.1', help='Host name')
ARGS.add_argument(
'--port', action="store", dest='port',
default=9999, type=int, help='Port number')
ARGS.add_argument(
'--iocp', action="store_true", dest='iocp',
default=False, help='Use IOCP event loop')
ARGS.add_argument("--game", dest='game', type=str, required=False,
help='aa || l2', default='aa')
ARGS.add_argument("--l2_chronicle", dest='l2_chronicle', type=str, required=False,
help='so many options', default='gracia_final')
args = ARGS.parse_args()
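# Editor's note: illustrative invocation sketch (run each half in its own
# shell); defaults match the argparse options above, and the msg_log /
# game_log fixtures are assumed to be importable.
#
#     python tcp_echo.py --server --port 9999 --game aa
#     python tcp_echo.py --client --port 9999 --game aa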
# ---------------------------------------
if args.game == '_l2':
f = os.path.join(os.path.dirname(__file__), 'fixtures/l2', 'game_log_with_xor_len.log')
pattern = {'c': b'client:', 's': b'server:', 'start': 10, 'end': -2}
if args.game == 'l2':
f = os.path.join(os.path.dirname(__file__), 'fixtures/l2', 'game_log_15122012_with_pck_len.log')
pattern = {'c': b'client:', 's': b'server:', 'start': 10, 'end': -2}
elif args.game == 'aa':
f = os.path.join(os.path.dirname(__file__), 'fixtures/aa', 'game_1.log')
pattern = {'c': b"c->", 's': b"s->", 'start': 3, 'end': -2}
# ---------------------------------------
log = Message.get_log_from_file(f, pattern)
log, side_log = Message.game_log_from_import(log)
print(log, side_log)
class EchoServer(asyncio.Protocol):
TIMEOUT = 5.0
message_server = {}
def timeout(self):
print('connection timeout, closing.')
self.transport.close()
def connection_made(self, transport):
print('connection made')
self.transport = transport
self.message_server[self.transport] = Message('server', log=log, side_log=side_log)
        # start the 5-second timeout timer
self.h_timeout = asyncio.get_event_loop().call_later(
self.TIMEOUT, self.timeout)
def data_received(self, data):
#print('data received: ', data.decode())
#print('S: ', data)
#self.transport.write(b'Re: ' + data)
data = b''.join(self.message_server[self.transport](data))
if data:
self.transport.write(data)
else:
self.transport.close()
#print('S send: ', b''.join(self.message_server[self.transport](data)))
# restart timeout timer
self.h_timeout.cancel()
self.h_timeout = asyncio.get_event_loop().call_later(
self.TIMEOUT, self.timeout)
def eof_received(self):
pass
def connection_lost(self, exc):
print('connection lost:', exc)
self.h_timeout.cancel()
class EchoClient(asyncio.Protocol):
message = 'This is the message. It will be echoed.'
message_client = Message('client', log=log, side_log=side_log)
def connection_made(self, transport):
self.transport = transport
#print(b''.join(self.message_client(b'')))
self.transport.write(b''.join(self.message_client(b'')))
#self.transport.write(self.message.encode())
#print('data sent:', self.message)
def data_received(self, data):
#print('C:', data)
data = b''.join(self.message_client(data))
if data:
self.transport.write(data)
else:
self.transport.close()
# disconnect after 10 seconds
asyncio.get_event_loop().call_later(10.0, self.transport.close)
def eof_received(self):
pass
def connection_lost(self, exc):
print('connection lost:', exc)
asyncio.get_event_loop().stop()
def start_client(loop, host, port):
t = asyncio.Task(loop.create_connection(EchoClient, host, port))
loop.run_until_complete(t)
def start_server(loop, host, port):
f = loop.create_server(EchoServer, host, port)
return loop.run_until_complete(f)
if __name__ == '__main__':
if ':' in args.host:
args.host, port = args.host.split(':', 1)
args.port = int(port)
if (not (args.server or args.client)) or (args.server and args.client):
print('Please specify --server or --client\n')
ARGS.print_help()
else:
if args.iocp:
from asyncio import windows_events
loop = windows_events.ProactorEventLoop()
asyncio.set_event_loop(loop)
else:
loop = asyncio.get_event_loop()
print ('Using backend: {0}'.format(loop.__class__.__name__))
if signal is not None and sys.platform != 'win32':
loop.add_signal_handler(signal.SIGINT, loop.stop)
if args.server:
server = start_server(loop, args.host, args.port)
else:
start_client(loop, args.host, args.port)
try:
loop.run_forever()
finally:
if args.server:
server.close()
loop.close()
| mit | -5,225,289,628,288,812,000 | 2,354,804,376,711,754,000 | 30.197674 | 100 | 0.591875 | false |
dims/glance | glance/api/v2/model/metadef_namespace.py | 20 | 3021 | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import wsme
from wsme.rest import json
from wsme import types
from glance.api.v2.model.metadef_object import MetadefObject
from glance.api.v2.model.metadef_property_type import PropertyType
from glance.api.v2.model.metadef_resource_type import ResourceTypeAssociation
from glance.api.v2.model.metadef_tag import MetadefTag
from glance.common.wsme_utils import WSMEModelTransformer
class Namespace(types.Base, WSMEModelTransformer):
# Base fields
namespace = wsme.wsattr(types.text, mandatory=True)
display_name = wsme.wsattr(types.text, mandatory=False)
description = wsme.wsattr(types.text, mandatory=False)
visibility = wsme.wsattr(types.text, mandatory=False)
protected = wsme.wsattr(bool, mandatory=False)
owner = wsme.wsattr(types.text, mandatory=False)
# Not using datetime since time format has to be
# in oslo_utils.timeutils.isotime() format
created_at = wsme.wsattr(types.text, mandatory=False)
updated_at = wsme.wsattr(types.text, mandatory=False)
# Contained fields
resource_type_associations = wsme.wsattr([ResourceTypeAssociation],
mandatory=False)
properties = wsme.wsattr({types.text: PropertyType}, mandatory=False)
objects = wsme.wsattr([MetadefObject], mandatory=False)
tags = wsme.wsattr([MetadefTag], mandatory=False)
# Generated fields
self = wsme.wsattr(types.text, mandatory=False)
schema = wsme.wsattr(types.text, mandatory=False)
    def __init__(self, **kwargs):
        super(Namespace, self).__init__(**kwargs)
@staticmethod
def to_model_properties(db_property_types):
property_types = {}
for db_property_type in db_property_types:
# Convert the persisted json schema to a dict of PropertyTypes
property_type = json.fromjson(
PropertyType, db_property_type.schema)
property_type_name = db_property_type.name
property_types[property_type_name] = property_type
return property_types
class Namespaces(types.Base, WSMEModelTransformer):
namespaces = wsme.wsattr([Namespace], mandatory=False)
# Pagination
next = wsme.wsattr(types.text, mandatory=False)
schema = wsme.wsattr(types.text, mandatory=True)
first = wsme.wsattr(types.text, mandatory=True)
def __init__(self, **kwargs):
super(Namespaces, self).__init__(**kwargs)
| apache-2.0 | 6,021,092,503,278,235,000 | 9,130,860,712,435,450,000 | 37.240506 | 77 | 0.712016 | false |
sdphome/UHF_Reader | u-boot-2015.04/test/image/test-fit.py | 3 | 12492 | #!/usr/bin/python
#
# Copyright (c) 2013, Google Inc.
#
# Sanity check of the FIT handling in U-Boot
#
# SPDX-License-Identifier: GPL-2.0+
#
# To run this:
#
# make O=sandbox sandbox_config
# make O=sandbox
# ./test/image/test-fit.py -u sandbox/u-boot
import doctest
from optparse import OptionParser
import os
import shutil
import struct
import sys
import tempfile
# Enable printing of all U-Boot output
DEBUG = True
# The 'command' library in patman is convenient for running commands
base_path = os.path.dirname(sys.argv[0])
patman = os.path.join(base_path, '../../tools/patman')
sys.path.append(patman)
import command
# Define a base ITS which we can adjust using % and a dictionary
base_its = '''
/dts-v1/;
/ {
description = "Chrome OS kernel image with one or more FDT blobs";
#address-cells = <1>;
images {
kernel@1 {
data = /incbin/("%(kernel)s");
type = "kernel";
arch = "sandbox";
os = "linux";
compression = "none";
load = <0x40000>;
entry = <0x8>;
};
fdt@1 {
description = "snow";
data = /incbin/("u-boot.dtb");
type = "flat_dt";
arch = "sandbox";
%(fdt_load)s
compression = "none";
signature@1 {
algo = "sha1,rsa2048";
key-name-hint = "dev";
};
};
ramdisk@1 {
description = "snow";
data = /incbin/("%(ramdisk)s");
type = "ramdisk";
arch = "sandbox";
os = "linux";
%(ramdisk_load)s
compression = "none";
};
};
configurations {
default = "conf@1";
conf@1 {
kernel = "kernel@1";
fdt = "fdt@1";
%(ramdisk_config)s
};
};
};
'''
# Define a base FDT - currently we don't use anything in this
base_fdt = '''
/dts-v1/;
/ {
model = "Sandbox Verified Boot Test";
compatible = "sandbox";
};
'''
# This is the U-Boot script that is run for each test. First load the fit,
# then do the 'bootm' command, then save out memory from the places where
# we expect 'bootm' to write things. Then quit.
base_script = '''
sb load hostfs 0 %(fit_addr)x %(fit)s
fdt addr %(fit_addr)x
bootm start %(fit_addr)x
bootm loados
sb save hostfs 0 %(kernel_addr)x %(kernel_out)s %(kernel_size)x
sb save hostfs 0 %(fdt_addr)x %(fdt_out)s %(fdt_size)x
sb save hostfs 0 %(ramdisk_addr)x %(ramdisk_out)s %(ramdisk_size)x
reset
'''
def debug_stdout(stdout):
if DEBUG:
print stdout
def make_fname(leaf):
"""Make a temporary filename
Args:
leaf: Leaf name of file to create (within temporary directory)
Return:
Temporary filename
"""
global base_dir
return os.path.join(base_dir, leaf)
def filesize(fname):
"""Get the size of a file
Args:
fname: Filename to check
Return:
Size of file in bytes
"""
return os.stat(fname).st_size
def read_file(fname):
"""Read the contents of a file
Args:
fname: Filename to read
Returns:
Contents of file as a string
"""
with open(fname, 'r') as fd:
return fd.read()
def make_dtb():
"""Make a sample .dts file and compile it to a .dtb
Returns:
Filename of .dtb file created
"""
src = make_fname('u-boot.dts')
dtb = make_fname('u-boot.dtb')
with open(src, 'w') as fd:
print >>fd, base_fdt
command.Output('dtc', src, '-O', 'dtb', '-o', dtb)
return dtb
def make_its(params):
"""Make a sample .its file with parameters embedded
Args:
params: Dictionary containing parameters to embed in the %() strings
Returns:
Filename of .its file created
"""
its = make_fname('test.its')
with open(its, 'w') as fd:
print >>fd, base_its % params
return its
def make_fit(mkimage, params):
"""Make a sample .fit file ready for loading
This creates a .its script with the selected parameters and uses mkimage to
turn this into a .fit image.
Args:
mkimage: Filename of 'mkimage' utility
params: Dictionary containing parameters to embed in the %() strings
Return:
Filename of .fit file created
"""
fit = make_fname('test.fit')
its = make_its(params)
command.Output(mkimage, '-f', its, fit)
with open(make_fname('u-boot.dts'), 'w') as fd:
print >>fd, base_fdt
return fit
def make_kernel():
"""Make a sample kernel with test data
Returns:
Filename of kernel created
"""
fname = make_fname('test-kernel.bin')
data = ''
for i in range(100):
data += 'this kernel %d is unlikely to boot\n' % i
with open(fname, 'w') as fd:
print >>fd, data
return fname
def make_ramdisk():
"""Make a sample ramdisk with test data
Returns:
Filename of ramdisk created
"""
fname = make_fname('test-ramdisk.bin')
data = ''
for i in range(100):
data += 'ramdisk %d was seldom used in the middle ages\n' % i
with open(fname, 'w') as fd:
print >>fd, data
return fname
def find_matching(text, match):
"""Find a match in a line of text, and return the unmatched line portion
This is used to extract a part of a line from some text. The match string
is used to locate the line - we use the first line that contains that
match text.
Once we find a match, we discard the match string itself from the line,
and return what remains.
TODO: If this function becomes more generally useful, we could change it
to use regex and return groups.
Args:
text: Text to check (each line separated by \n)
match: String to search for
Return:
String containing unmatched portion of line
Exceptions:
ValueError: If match is not found
>>> find_matching('first line:10\\nsecond_line:20', 'first line:')
'10'
>>> find_matching('first line:10\\nsecond_line:20', 'second linex')
Traceback (most recent call last):
...
ValueError: Test aborted
>>> find_matching('first line:10\\nsecond_line:20', 'second_line:')
'20'
"""
for line in text.splitlines():
pos = line.find(match)
if pos != -1:
return line[:pos] + line[pos + len(match):]
    print "Expected '%s' but not found in output:" % match
print text
raise ValueError('Test aborted')
def set_test(name):
"""Set the name of the current test and print a message
Args:
name: Name of test
"""
global test_name
test_name = name
print name
def fail(msg, stdout):
"""Raise an error with a helpful failure message
Args:
msg: Message to display
"""
print stdout
raise ValueError("Test '%s' failed: %s" % (test_name, msg))
def run_fit_test(mkimage, u_boot):
"""Basic sanity check of FIT loading in U-Boot
TODO: Almost everything:
- hash algorithms - invalid hash/contents should be detected
- signature algorithms - invalid sig/contents should be detected
- compression
- checking that errors are detected like:
- image overwriting
- missing images
- invalid configurations
- incorrect os/arch/type fields
- empty data
- images too large/small
- invalid FDT (e.g. putting a random binary in instead)
- default configuration selection
- bootm command line parameters should have desired effect
- run code coverage to make sure we are testing all the code
"""
global test_name
# Set up invariant files
control_dtb = make_dtb()
kernel = make_kernel()
ramdisk = make_ramdisk()
kernel_out = make_fname('kernel-out.bin')
fdt_out = make_fname('fdt-out.dtb')
ramdisk_out = make_fname('ramdisk-out.bin')
# Set up basic parameters with default values
params = {
'fit_addr' : 0x1000,
'kernel' : kernel,
'kernel_out' : kernel_out,
'kernel_addr' : 0x40000,
'kernel_size' : filesize(kernel),
'fdt_out' : fdt_out,
'fdt_addr' : 0x80000,
'fdt_size' : filesize(control_dtb),
'fdt_load' : '',
'ramdisk' : ramdisk,
'ramdisk_out' : ramdisk_out,
'ramdisk_addr' : 0xc0000,
'ramdisk_size' : filesize(ramdisk),
'ramdisk_load' : '',
'ramdisk_config' : '',
}
# Make a basic FIT and a script to load it
fit = make_fit(mkimage, params)
params['fit'] = fit
cmd = base_script % params
# First check that we can load a kernel
# We could perhaps reduce duplication with some loss of readability
set_test('Kernel load')
stdout = command.Output(u_boot, '-d', control_dtb, '-c', cmd)
debug_stdout(stdout)
if read_file(kernel) != read_file(kernel_out):
fail('Kernel not loaded', stdout)
if read_file(control_dtb) == read_file(fdt_out):
fail('FDT loaded but should be ignored', stdout)
if read_file(ramdisk) == read_file(ramdisk_out):
fail('Ramdisk loaded but should not be', stdout)
# Find out the offset in the FIT where U-Boot has found the FDT
line = find_matching(stdout, 'Booting using the fdt blob at ')
fit_offset = int(line, 16) - params['fit_addr']
fdt_magic = struct.pack('>L', 0xd00dfeed)
data = read_file(fit)
# Now find where it actually is in the FIT (skip the first word)
real_fit_offset = data.find(fdt_magic, 4)
if fit_offset != real_fit_offset:
fail('U-Boot loaded FDT from offset %#x, FDT is actually at %#x' %
(fit_offset, real_fit_offset), stdout)
# Now a kernel and an FDT
set_test('Kernel + FDT load')
params['fdt_load'] = 'load = <%#x>;' % params['fdt_addr']
fit = make_fit(mkimage, params)
stdout = command.Output(u_boot, '-d', control_dtb, '-c', cmd)
debug_stdout(stdout)
if read_file(kernel) != read_file(kernel_out):
fail('Kernel not loaded', stdout)
if read_file(control_dtb) != read_file(fdt_out):
fail('FDT not loaded', stdout)
if read_file(ramdisk) == read_file(ramdisk_out):
fail('Ramdisk loaded but should not be', stdout)
# Try a ramdisk
set_test('Kernel + FDT + Ramdisk load')
params['ramdisk_config'] = 'ramdisk = "ramdisk@1";'
params['ramdisk_load'] = 'load = <%#x>;' % params['ramdisk_addr']
fit = make_fit(mkimage, params)
stdout = command.Output(u_boot, '-d', control_dtb, '-c', cmd)
debug_stdout(stdout)
if read_file(ramdisk) != read_file(ramdisk_out):
fail('Ramdisk not loaded', stdout)
def run_tests():
"""Parse options, run the FIT tests and print the result"""
global base_path, base_dir
# Work in a temporary directory
base_dir = tempfile.mkdtemp()
parser = OptionParser()
parser.add_option('-u', '--u-boot',
default=os.path.join(base_path, 'u-boot'),
help='Select U-Boot sandbox binary')
parser.add_option('-k', '--keep', action='store_true',
help="Don't delete temporary directory even when tests pass")
parser.add_option('-t', '--selftest', action='store_true',
help='Run internal self tests')
(options, args) = parser.parse_args()
# Find the path to U-Boot, and assume mkimage is in its tools/mkimage dir
base_path = os.path.dirname(options.u_boot)
mkimage = os.path.join(base_path, 'tools/mkimage')
# There are a few doctests - handle these here
if options.selftest:
doctest.testmod()
return
title = 'FIT Tests'
print title, '\n', '=' * len(title)
run_fit_test(mkimage, options.u_boot)
print '\nTests passed'
print 'Caveat: this is only a sanity check - test coverage is poor'
    # Remove the temporary directory unless we are asked to keep it
if options.keep:
print "Output files are in '%s'" % base_dir
else:
shutil.rmtree(base_dir)
run_tests()
| gpl-3.0 | 7,970,683,757,473,444,000 | 4,107,857,519,641,005,000 | 28.742857 | 79 | 0.579091 | false |
mahak/nova | nova/conf/hyperv.py | 4 | 10493 | # Copyright (c) 2016 TUBITAK BILGEM
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
hyperv_opt_group = cfg.OptGroup("hyperv",
title='The Hyper-V feature',
help="""
The hyperv feature allows you to configure the Hyper-V hypervisor
driver to be used within an OpenStack deployment.
""")
hyperv_opts = [
cfg.FloatOpt('dynamic_memory_ratio',
default=1.0,
help="""
Dynamic memory ratio
Enables dynamic memory allocation (ballooning) when set to a value
greater than 1. The value expresses the ratio between the total RAM
assigned to an instance and its startup RAM amount. For example a
ratio of 2.0 for an instance with 1024MB of RAM implies 512MB of
RAM allocated at startup.
Possible values:
* 1.0: Disables dynamic memory allocation (Default).
* Float values greater than 1.0: Enables allocation of total implied
RAM divided by this value for startup.
"""),
cfg.BoolOpt('enable_instance_metrics_collection',
default=False,
help="""
Enable instance metrics collection
Enables metrics collection for an instance by using Hyper-V's
metric APIs. Collected data can be retrieved by other apps and
services, e.g.: Ceilometer.
"""),
cfg.StrOpt('instances_path_share',
default="",
help="""
Instances path share
The name of a Windows share mapped to the "instances_path" dir
and used by the resize feature to copy files to the target host.
If left blank, an administrative share (hidden network share) will
be used, looking for the same "instances_path" used locally.
Possible values:
* "": An administrative share will be used (Default).
* Name of a Windows share.
Related options:
* "instances_path": The directory which will be used if this option
here is left blank.
"""),
cfg.BoolOpt('limit_cpu_features',
default=False,
help="""
Limit CPU features
This flag is needed to support live migration to hosts with
different CPU features and is checked during instance creation
in order to limit the CPU features used by the instance.
"""),
cfg.IntOpt('mounted_disk_query_retry_count',
default=10,
min=0,
help="""
Mounted disk query retry count
The number of times to retry checking for a mounted disk.
The query runs until the device can be found or the retry
count is reached.
Possible values:
* Positive integer values. Values greater than 1 are recommended
(Default: 10).
Related options:
* Time interval between disk mount retries is declared with
"mounted_disk_query_retry_interval" option.
"""),
cfg.IntOpt('mounted_disk_query_retry_interval',
default=5,
min=0,
help="""
Mounted disk query retry interval
Interval between checks for a mounted disk, in seconds.
Possible values:
* Time in seconds (Default: 5).
Related options:
* This option is meaningful when the mounted_disk_query_retry_count
is greater than 1.
* The retry loop runs with mounted_disk_query_retry_count and
mounted_disk_query_retry_interval configuration options.
"""),
cfg.IntOpt('power_state_check_timeframe',
default=60,
min=0,
help="""
Power state check timeframe
The timeframe to be checked for instance power state changes.
This option is used to fetch the state of the instance from Hyper-V
through the WMI interface, within the specified timeframe.
Possible values:
* Timeframe in seconds (Default: 60).
"""),
cfg.IntOpt('power_state_event_polling_interval',
default=2,
min=0,
help="""
Power state event polling interval
Instance power state change event polling frequency. Sets the
listener interval for power state events to the given value.
This option enhances the internal lifecycle notifications of
instances that reboot themselves. It is unlikely that an operator
has to change this value.
Possible values:
* Time in seconds (Default: 2).
"""),
cfg.StrOpt('qemu_img_cmd',
default="qemu-img.exe",
help=r"""
qemu-img command
qemu-img is required for some of the image related operations
like converting between different image types. You can get it
from here: (http://qemu.weilnetz.de/) or you can install the
Cloudbase OpenStack Hyper-V Compute Driver
(https://cloudbase.it/openstack-hyperv-driver/) which automatically
sets the proper path for this config option. You can either give the
full path of qemu-img.exe or set its path in the PATH environment
variable and leave this option to the default value.
Possible values:
* Name of the qemu-img executable, in case it is in the same
directory as the nova-compute service or its path is in the
PATH environment variable (Default).
* Path of qemu-img command (DRIVELETTER:\PATH\TO\QEMU-IMG\COMMAND).
Related options:
* If the config_drive_cdrom option is False, qemu-img will be used to
convert the ISO to a VHD, otherwise the config drive will
remain an ISO. To use config drive with Hyper-V, you must
set the ``mkisofs_cmd`` value to the full path to an ``mkisofs.exe``
installation.
"""),
cfg.StrOpt('vswitch_name',
help="""
External virtual switch name
The Hyper-V Virtual Switch is a software-based layer-2 Ethernet
network switch that is available with the installation of the
Hyper-V server role. The switch includes programmatically managed
and extensible capabilities to connect virtual machines to both
virtual networks and the physical network. In addition, Hyper-V
Virtual Switch provides policy enforcement for security, isolation,
and service levels. The vSwitch represented by this config option
must be an external one (not internal or private).
Possible values:
* If not provided, the first of a list of available vswitches
is used. This list is queried using WQL.
* Virtual switch name.
"""),
cfg.IntOpt('wait_soft_reboot_seconds',
default=60,
min=0,
help="""
Wait soft reboot seconds
Number of seconds to wait for instance to shut down after soft
reboot request is made. We fall back to hard reboot if instance
does not shut down within this window.
Possible values:
* Time in seconds (Default: 60).
"""),
cfg.BoolOpt('config_drive_cdrom',
default=False,
help="""
Mount config drive as a CD drive.
OpenStack can be configured to write instance metadata to a config drive, which
is then attached to the instance before it boots. The config drive can be
attached as a disk drive (default) or as a CD drive.
Related options:
* This option is meaningful with ``force_config_drive`` option set to ``True``
or when the REST API call to create an instance will have
``--config-drive=True`` flag.
* ``config_drive_format`` option must be set to ``iso9660`` in order to use
CD drive as the config drive image.
* To use config drive with Hyper-V, you must set the
``mkisofs_cmd`` value to the full path to an ``mkisofs.exe`` installation.
Additionally, you must set the ``qemu_img_cmd`` value to the full path
to an ``qemu-img`` command installation.
* You can configure the Compute service to always create a configuration
drive by setting the ``force_config_drive`` option to ``True``.
"""),
cfg.BoolOpt('config_drive_inject_password',
default=False,
help="""
Inject password to config drive.
When enabled, the admin password will be available from the config drive image.
Related options:
* This option is meaningful when used with other options that enable
config drive usage with Hyper-V, such as ``force_config_drive``.
"""),
cfg.IntOpt('volume_attach_retry_count',
default=10,
min=0,
help="""
Volume attach retry count
The number of times to retry attaching a volume. Volume attachment
is retried until success or the given retry count is reached.
Possible values:
* Positive integer values (Default: 10).
Related options:
* Time interval between attachment attempts is declared with
volume_attach_retry_interval option.
"""),
cfg.IntOpt('volume_attach_retry_interval',
default=5,
min=0,
help="""
Volume attach retry interval
Interval between volume attachment attempts, in seconds.
Possible values:
* Time in seconds (Default: 5).
Related options:
* This option is meaningful when volume_attach_retry_count
is greater than 1.
* The retry loop runs with volume_attach_retry_count and
volume_attach_retry_interval configuration options.
"""),
cfg.BoolOpt('enable_remotefx',
default=False,
help="""
Enable RemoteFX feature
This requires at least one DirectX 11 capable graphics adapter for
Windows / Hyper-V Server 2012 R2 or newer, and the RDS-Virtualization
feature has to be enabled.
Instances with RemoteFX can be requested with the following flavor
extra specs:
**os:resolution**. Guest VM screen resolution size. Acceptable values::
1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600, 3840x2160
``3840x2160`` is only available on Windows / Hyper-V Server 2016.
**os:monitors**. Guest VM number of monitors. Acceptable values::
[1, 4] - Windows / Hyper-V Server 2012 R2
[1, 8] - Windows / Hyper-V Server 2016
**os:vram**. Guest VM VRAM amount. Only available on
Windows / Hyper-V Server 2016. Acceptable values::
64, 128, 256, 512, 1024
"""),
cfg.BoolOpt('use_multipath_io',
default=False,
help="""
Use multipath connections when attaching iSCSI or FC disks.
This requires the Multipath IO Windows feature to be enabled. MPIO must be
configured to claim such devices.
"""),
cfg.ListOpt('iscsi_initiator_list',
default=[],
help="""
List of iSCSI initiators that will be used for establishing iSCSI sessions.
If none are specified, the Microsoft iSCSI initiator service will choose the
initiator.
""")
]
def register_opts(conf):
conf.register_group(hyperv_opt_group)
conf.register_opts(hyperv_opts, group=hyperv_opt_group)
def list_opts():
return {hyperv_opt_group: hyperv_opts}
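# Editor's note: an illustrative consumption sketch, not part of the
# original module; it follows standard oslo.config usage.
#
#     from oslo_config import cfg
#     CONF = cfg.CONF
#     register_opts(CONF)
#     ratio = CONF.hyperv.dynamic_memory_ratio   # 1.0 unless overridden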
| apache-2.0 | -5,669,727,624,460,618,000 | 5,462,529,748,996,118,000 | 30.136499 | 79 | 0.725817 | false |
mjfarmer/scada_py | env/lib/python2.7/site-packages/zope/interface/tests/test_declarations.py | 18 | 57969 | ##############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Test the new API for making and checking interface declarations
"""
import unittest
from zope.interface._compat import _skip_under_py3k, _u
class _Py3ClassAdvice(object):
def _run_generated_code(self, code, globs, locs,
fails_under_py3k=True,
):
import warnings
from zope.interface._compat import PYTHON3
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
if not PYTHON3:
exec(code, globs, locs)
self.assertEqual(len(log), 0) # no longer warn
return True
else:
try:
exec(code, globs, locs)
except TypeError:
return False
else:
if fails_under_py3k:
self.fail("Didn't raise TypeError")
class NamedTests(unittest.TestCase):
def test_class(self):
from zope.interface.declarations import named
@named(_u('foo'))
class Foo(object):
pass
self.assertEqual(Foo.__component_name__, _u('foo'))
def test_function(self):
from zope.interface.declarations import named
@named(_u('foo'))
def doFoo(object):
pass
self.assertEqual(doFoo.__component_name__, _u('foo'))
def test_instance(self):
from zope.interface.declarations import named
class Foo(object):
pass
foo = Foo()
named(_u('foo'))(foo)
self.assertEqual(foo.__component_name__, _u('foo'))
class DeclarationTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import Declaration
return Declaration
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor_no_bases(self):
decl = self._makeOne()
self.assertEqual(list(decl.__bases__), [])
def test_ctor_w_interface_in_bases(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne(IFoo)
self.assertEqual(list(decl.__bases__), [IFoo])
def test_ctor_w_implements_in_bases(self):
from zope.interface.declarations import Implements
impl = Implements()
decl = self._makeOne(impl)
self.assertEqual(list(decl.__bases__), [impl])
def test_changed_wo_existing__v_attrs(self):
decl = self._makeOne()
decl.changed(decl) # doesn't raise
self.assertFalse('_v_attrs' in decl.__dict__)
def test_changed_w_existing__v_attrs(self):
decl = self._makeOne()
decl._v_attrs = object()
decl.changed(decl)
self.assertFalse('_v_attrs' in decl.__dict__)
def test___contains__w_self(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne()
self.assertFalse(decl in decl)
def test___contains__w_unrelated_iface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne()
self.assertFalse(IFoo in decl)
def test___contains__w_base_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne(IFoo)
self.assertTrue(IFoo in decl)
def test___iter___empty(self):
decl = self._makeOne()
self.assertEqual(list(decl), [])
def test___iter___single_base(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne(IFoo)
self.assertEqual(list(decl), [IFoo])
def test___iter___multiple_bases(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
decl = self._makeOne(IFoo, IBar)
self.assertEqual(list(decl), [IFoo, IBar])
def test___iter___inheritance(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar', (IFoo,))
decl = self._makeOne(IBar)
self.assertEqual(list(decl), [IBar]) #IBar.interfaces() omits bases
def test___iter___w_nested_sequence_overlap(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
decl = self._makeOne(IBar, (IFoo, IBar))
self.assertEqual(list(decl), [IBar, IFoo])
def test_flattened_empty(self):
from zope.interface.interface import Interface
decl = self._makeOne()
self.assertEqual(list(decl.flattened()), [Interface])
def test_flattened_single_base(self):
from zope.interface.interface import Interface
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decl = self._makeOne(IFoo)
self.assertEqual(list(decl.flattened()), [IFoo, Interface])
def test_flattened_multiple_bases(self):
from zope.interface.interface import Interface
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
decl = self._makeOne(IFoo, IBar)
self.assertEqual(list(decl.flattened()), [IFoo, IBar, Interface])
def test_flattened_inheritance(self):
from zope.interface.interface import Interface
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar', (IFoo,))
decl = self._makeOne(IBar)
self.assertEqual(list(decl.flattened()), [IBar, IFoo, Interface])
def test_flattened_w_nested_sequence_overlap(self):
from zope.interface.interface import Interface
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
decl = self._makeOne(IBar, (IFoo, IBar))
# Note that decl.__iro__ has IFoo first.
self.assertEqual(list(decl.flattened()), [IFoo, IBar, Interface])
def test___sub___unrelated_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
before = self._makeOne(IFoo)
after = before - IBar
self.assertTrue(isinstance(after, self._getTargetClass()))
self.assertEqual(list(after), [IFoo])
def test___sub___related_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
before = self._makeOne(IFoo)
after = before - IFoo
self.assertEqual(list(after), [])
def test___sub___related_interface_by_inheritance(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar', (IFoo,))
before = self._makeOne(IBar)
after = before - IBar
self.assertEqual(list(after), [])
def test___add___unrelated_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
before = self._makeOne(IFoo)
after = before + IBar
self.assertTrue(isinstance(after, self._getTargetClass()))
self.assertEqual(list(after), [IFoo, IBar])
def test___add___related_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
IBaz = InterfaceClass('IBaz')
before = self._makeOne(IFoo, IBar)
other = self._makeOne(IBar, IBaz)
after = before + other
self.assertEqual(list(after), [IFoo, IBar, IBaz])
class ImplementsTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import Implements
return Implements
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor_no_bases(self):
impl = self._makeOne()
self.assertEqual(impl.inherit, None)
self.assertEqual(impl.declared, ())
self.assertEqual(impl.__name__, '?')
self.assertEqual(list(impl.__bases__), [])
def test___repr__(self):
impl = self._makeOne()
impl.__name__ = 'Testing'
self.assertEqual(repr(impl), '<implementedBy Testing>')
def test___reduce__(self):
from zope.interface.declarations import implementedBy
impl = self._makeOne()
self.assertEqual(impl.__reduce__(), (implementedBy, (None,)))
class Test_implementedByFallback(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import implementedByFallback
return implementedByFallback(*args, **kw)
def test_dictless_wo_existing_Implements_wo_registrations(self):
class Foo(object):
__slots__ = ('__implemented__',)
foo = Foo()
foo.__implemented__ = None
self.assertEqual(list(self._callFUT(foo)), [])
def test_dictless_wo_existing_Implements_cant_assign___implemented__(self):
class Foo(object):
def _get_impl(self): return None
def _set_impl(self, val): raise TypeError
__implemented__ = property(_get_impl, _set_impl)
def __call__(self): pass #act like a factory
foo = Foo()
self.assertRaises(TypeError, self._callFUT, foo)
def test_dictless_wo_existing_Implements_w_registrations(self):
from zope.interface import declarations
class Foo(object):
__slots__ = ('__implemented__',)
foo = Foo()
foo.__implemented__ = None
reg = object()
with _MonkeyDict(declarations,
'BuiltinImplementationSpecifications') as specs:
specs[foo] = reg
self.assertTrue(self._callFUT(foo) is reg)
def test_dictless_w_existing_Implements(self):
from zope.interface.declarations import Implements
impl = Implements()
class Foo(object):
__slots__ = ('__implemented__',)
foo = Foo()
foo.__implemented__ = impl
self.assertTrue(self._callFUT(foo) is impl)
def test_dictless_w_existing_not_Implements(self):
from zope.interface.interface import InterfaceClass
class Foo(object):
__slots__ = ('__implemented__',)
foo = Foo()
IFoo = InterfaceClass('IFoo')
foo.__implemented__ = (IFoo,)
self.assertEqual(list(self._callFUT(foo)), [IFoo])
def test_w_existing_attr_as_Implements(self):
from zope.interface.declarations import Implements
impl = Implements()
class Foo(object):
__implemented__ = impl
self.assertTrue(self._callFUT(Foo) is impl)
def test_builtins_added_to_cache(self):
from zope.interface import declarations
from zope.interface.declarations import Implements
from zope.interface._compat import _BUILTINS
with _MonkeyDict(declarations,
'BuiltinImplementationSpecifications') as specs:
self.assertEqual(list(self._callFUT(tuple)), [])
self.assertEqual(list(self._callFUT(list)), [])
self.assertEqual(list(self._callFUT(dict)), [])
for typ in (tuple, list, dict):
spec = specs[typ]
self.assertTrue(isinstance(spec, Implements))
self.assertEqual(repr(spec),
'<implementedBy %s.%s>'
% (_BUILTINS, typ.__name__))
def test_builtins_w_existing_cache(self):
from zope.interface import declarations
t_spec, l_spec, d_spec = object(), object(), object()
with _MonkeyDict(declarations,
'BuiltinImplementationSpecifications') as specs:
specs[tuple] = t_spec
specs[list] = l_spec
specs[dict] = d_spec
self.assertTrue(self._callFUT(tuple) is t_spec)
self.assertTrue(self._callFUT(list) is l_spec)
self.assertTrue(self._callFUT(dict) is d_spec)
def test_oldstyle_class_no_assertions(self):
# TODO: Figure out P3 story
class Foo:
pass
self.assertEqual(list(self._callFUT(Foo)), [])
def test_no_assertions(self):
# TODO: Figure out P3 story
class Foo(object):
pass
self.assertEqual(list(self._callFUT(Foo)), [])
def test_w_None_no_bases_not_factory(self):
class Foo(object):
__implemented__ = None
foo = Foo()
self.assertRaises(TypeError, self._callFUT, foo)
def test_w_None_no_bases_w_factory(self):
from zope.interface.declarations import objectSpecificationDescriptor
class Foo(object):
__implemented__ = None
def __call__(self):
pass
foo = Foo()
foo.__name__ = 'foo'
spec = self._callFUT(foo)
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.foo')
self.assertTrue(spec.inherit is foo)
self.assertTrue(foo.__implemented__ is spec)
self.assertTrue(foo.__providedBy__ is objectSpecificationDescriptor)
self.assertFalse('__provides__' in foo.__dict__)
def test_w_None_no_bases_w_class(self):
from zope.interface.declarations import ClassProvides
class Foo(object):
__implemented__ = None
spec = self._callFUT(Foo)
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.Foo')
self.assertTrue(spec.inherit is Foo)
self.assertTrue(Foo.__implemented__ is spec)
self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides))
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(Foo.__provides__, Foo.__providedBy__)
def test_w_existing_Implements(self):
from zope.interface.declarations import Implements
impl = Implements()
class Foo(object):
__implemented__ = impl
self.assertTrue(self._callFUT(Foo) is impl)
class Test_implementedBy(Test_implementedByFallback):
# Repeat tests for C optimizations
def _callFUT(self, *args, **kw):
from zope.interface.declarations import implementedBy
return implementedBy(*args, **kw)
class Test_classImplementsOnly(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import classImplementsOnly
return classImplementsOnly(*args, **kw)
def test_no_existing(self):
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
class Foo(object):
pass
ifoo = InterfaceClass('IFoo')
self._callFUT(Foo, ifoo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.Foo')
self.assertTrue(spec.inherit is None)
self.assertTrue(Foo.__implemented__ is spec)
self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides))
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(Foo.__provides__, Foo.__providedBy__)
def test_w_existing_Implements(self):
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
impl = Implements(IFoo)
impl.declared = (IFoo,)
class Foo(object):
__implemented__ = impl
impl.inherit = Foo
self._callFUT(Foo, IBar)
# Same spec, now different values
self.assertTrue(Foo.__implemented__ is impl)
self.assertEqual(impl.inherit, None)
self.assertEqual(impl.declared, (IBar,))
class Test_classImplements(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import classImplements
return classImplements(*args, **kw)
def test_no_existing(self):
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
class Foo(object):
pass
IFoo = InterfaceClass('IFoo')
self._callFUT(Foo, IFoo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.Foo')
self.assertTrue(spec.inherit is Foo)
self.assertTrue(Foo.__implemented__ is spec)
self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides))
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(Foo.__provides__, Foo.__providedBy__)
def test_w_existing_Implements(self):
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
impl = Implements(IFoo)
impl.declared = (IFoo,)
class Foo(object):
__implemented__ = impl
impl.inherit = Foo
self._callFUT(Foo, IBar)
# Same spec, now different values
self.assertTrue(Foo.__implemented__ is impl)
self.assertEqual(impl.inherit, Foo)
self.assertEqual(impl.declared, (IFoo, IBar,))
def test_w_existing_Implements_w_bases(self):
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
IBaz = InterfaceClass('IBaz', IFoo)
b_impl = Implements(IBaz)
impl = Implements(IFoo)
impl.declared = (IFoo,)
class Base1(object):
__implemented__ = b_impl
class Base2(object):
__implemented__ = b_impl
class Foo(Base1, Base2):
__implemented__ = impl
impl.inherit = Foo
self._callFUT(Foo, IBar)
# Same spec, now different values
self.assertTrue(Foo.__implemented__ is impl)
self.assertEqual(impl.inherit, Foo)
self.assertEqual(impl.declared, (IFoo, IBar,))
self.assertEqual(impl.__bases__, (IFoo, IBar, b_impl))
class Test__implements_advice(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import _implements_advice
return _implements_advice(*args, **kw)
def test_no_existing_implements(self):
from zope.interface.declarations import classImplements
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
class Foo(object):
__implements_advice_data__ = ((IFoo,), classImplements)
self._callFUT(Foo)
self.assertFalse('__implements_advice_data__' in Foo.__dict__)
self.assertTrue(isinstance(Foo.__implemented__, Implements))
self.assertEqual(list(Foo.__implemented__), [IFoo])
class Test_implementer(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import implementer
return implementer
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_oldstyle_class(self):
# TODO Py3 story
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
class Foo:
pass
decorator = self._makeOne(IFoo)
returned = decorator(Foo)
self.assertTrue(returned is Foo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.Foo')
self.assertTrue(spec.inherit is Foo)
self.assertTrue(Foo.__implemented__ is spec)
self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides))
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(Foo.__provides__, Foo.__providedBy__)
def test_newstyle_class(self):
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
class Foo(object):
pass
decorator = self._makeOne(IFoo)
returned = decorator(Foo)
self.assertTrue(returned is Foo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__,
'zope.interface.tests.test_declarations.Foo')
self.assertTrue(spec.inherit is Foo)
self.assertTrue(Foo.__implemented__ is spec)
self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides))
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(Foo.__provides__, Foo.__providedBy__)
def test_nonclass_cannot_assign_attr(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decorator = self._makeOne(IFoo)
self.assertRaises(TypeError, decorator, object())
def test_nonclass_can_assign_attr(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
class Foo(object):
pass
foo = Foo()
decorator = self._makeOne(IFoo)
returned = decorator(foo)
self.assertTrue(returned is foo)
spec = foo.__implemented__
self.assertEqual(spec.__name__, '?')
self.assertTrue(spec.inherit is None)
self.assertTrue(foo.__implemented__ is spec)
class Test_implementer_only(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import implementer_only
return implementer_only
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_function(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decorator = self._makeOne(IFoo)
def _function(): pass
self.assertRaises(ValueError, decorator, _function)
def test_method(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
decorator = self._makeOne(IFoo)
class Bar:
def _method(): pass
self.assertRaises(ValueError, decorator, Bar._method)
def test_oldstyle_class(self):
# TODO Py3 story
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
old_spec = Implements(IBar)
class Foo:
__implemented__ = old_spec
decorator = self._makeOne(IFoo)
returned = decorator(Foo)
self.assertTrue(returned is Foo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__, '?')
self.assertTrue(spec.inherit is None)
self.assertTrue(Foo.__implemented__ is spec)
def test_newstyle_class(self):
from zope.interface.declarations import Implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass('IFoo')
IBar = InterfaceClass('IBar')
old_spec = Implements(IBar)
class Foo(object):
__implemented__ = old_spec
decorator = self._makeOne(IFoo)
returned = decorator(Foo)
self.assertTrue(returned is Foo)
spec = Foo.__implemented__
self.assertEqual(spec.__name__, '?')
self.assertTrue(spec.inherit is None)
self.assertTrue(Foo.__implemented__ is spec)
# Test '_implements' by way of 'implements{,Only}', its only callers.
class Test_implementsOnly(unittest.TestCase, _Py3ClassAdvice):
def _getFUT(self):
from zope.interface.declarations import implementsOnly
return implementsOnly
def test_simple(self):
import warnings
from zope.interface.declarations import implementsOnly
from zope.interface._compat import PYTHON3
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'implementsOnly': implementsOnly,
'IFoo': IFoo,
}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' implementsOnly(IFoo)',
])
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
try:
exec(CODE, globs, locs)
except TypeError:
if not PYTHON3:
raise
else:
if PYTHON3:
self.fail("Didn't raise TypeError")
Foo = locs['Foo']
spec = Foo.__implemented__
self.assertEqual(list(spec), [IFoo])
self.assertEqual(len(log), 0) # no longer warn
def test_called_once_from_class_w_bases(self):
from zope.interface.declarations import implements
from zope.interface.declarations import implementsOnly
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
globs = {'implements': implements,
'implementsOnly': implementsOnly,
'IFoo': IFoo,
'IBar': IBar,
}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' implements(IFoo)',
'class Bar(Foo):',
' implementsOnly(IBar)',
])
if self._run_generated_code(CODE, globs, locs):
Bar = locs['Bar']
spec = Bar.__implemented__
self.assertEqual(list(spec), [IBar])
class Test_implements(unittest.TestCase, _Py3ClassAdvice):
def _getFUT(self):
from zope.interface.declarations import implements
return implements
def test_called_from_function(self):
import warnings
from zope.interface.declarations import implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'implements': implements, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'def foo():',
' implements(IFoo)'
])
if self._run_generated_code(CODE, globs, locs, False):
foo = locs['foo']
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
self.assertRaises(TypeError, foo)
self.assertEqual(len(log), 0) # no longer warn
def test_called_twice_from_class(self):
import warnings
from zope.interface.declarations import implements
from zope.interface.interface import InterfaceClass
from zope.interface._compat import PYTHON3
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
globs = {'implements': implements, 'IFoo': IFoo, 'IBar': IBar}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' implements(IFoo)',
' implements(IBar)',
])
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
try:
exec(CODE, globs, locs)
except TypeError:
if not PYTHON3:
self.assertEqual(len(log), 0) # no longer warn
else:
self.fail("Didn't raise TypeError")
def test_called_once_from_class(self):
from zope.interface.declarations import implements
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'implements': implements, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' implements(IFoo)',
])
if self._run_generated_code(CODE, globs, locs):
Foo = locs['Foo']
spec = Foo.__implemented__
self.assertEqual(list(spec), [IFoo])
class ProvidesClassTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import ProvidesClass
return ProvidesClass
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_simple_class_one_interface(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
spec = self._makeOne(Foo, IFoo)
self.assertEqual(list(spec), [IFoo])
def test___reduce__(self):
from zope.interface.declarations import Provides # the function
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
spec = self._makeOne(Foo, IFoo)
klass, args = spec.__reduce__()
self.assertTrue(klass is Provides)
self.assertEqual(args, (Foo, IFoo))
def test___get___class(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
spec = self._makeOne(Foo, IFoo)
Foo.__provides__ = spec
self.assertTrue(Foo.__provides__ is spec)
def test___get___instance(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
spec = self._makeOne(Foo, IFoo)
Foo.__provides__ = spec
def _test():
foo = Foo()
return foo.__provides__
self.assertRaises(AttributeError, _test)
class Test_Provides(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import Provides
return Provides(*args, **kw)
def test_no_cached_spec(self):
from zope.interface import declarations
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
cache = {}
class Foo(object):
pass
with _Monkey(declarations, InstanceDeclarations=cache):
spec = self._callFUT(Foo, IFoo)
self.assertEqual(list(spec), [IFoo])
self.assertTrue(cache[(Foo, IFoo)] is spec)
def test_w_cached_spec(self):
from zope.interface import declarations
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
prior = object()
class Foo(object):
pass
cache = {(Foo, IFoo): prior}
with _Monkey(declarations, InstanceDeclarations=cache):
spec = self._callFUT(Foo, IFoo)
self.assertTrue(spec is prior)
class Test_directlyProvides(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import directlyProvides
return directlyProvides(*args, **kw)
def test_w_normal_object(self):
from zope.interface.declarations import ProvidesClass
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
obj = Foo()
self._callFUT(obj, IFoo)
self.assertTrue(isinstance(obj.__provides__, ProvidesClass))
self.assertEqual(list(obj.__provides__), [IFoo])
def test_w_class(self):
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
self._callFUT(Foo, IFoo)
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(list(Foo.__provides__), [IFoo])
@_skip_under_py3k
def test_w_non_descriptor_aware_metaclass(self):
# There are no non-descriptor-aware types in Py3k
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class MetaClass(type):
def __getattribute__(self, name):
# Emulate metaclass whose base is not the type object.
if name == '__class__':
return self
return type.__getattribute__(self, name)
class Foo(object):
__metaclass__ = MetaClass
obj = Foo()
self.assertRaises(TypeError, self._callFUT, obj, IFoo)
def test_w_classless_object(self):
from zope.interface.declarations import ProvidesClass
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
the_dict = {}
class Foo(object):
def __getattribute__(self, name):
# Emulate object w/o any class
if name == '__class__':
return None
try:
return the_dict[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
the_dict[name] = value
obj = Foo()
self._callFUT(obj, IFoo)
self.assertTrue(isinstance(the_dict['__provides__'], ProvidesClass))
self.assertEqual(list(the_dict['__provides__']), [IFoo])
class Test_alsoProvides(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import alsoProvides
return alsoProvides(*args, **kw)
def test_wo_existing_provides(self):
from zope.interface.declarations import ProvidesClass
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
obj = Foo()
self._callFUT(obj, IFoo)
self.assertTrue(isinstance(obj.__provides__, ProvidesClass))
self.assertEqual(list(obj.__provides__), [IFoo])
def test_w_existing_provides(self):
from zope.interface.declarations import directlyProvides
from zope.interface.declarations import ProvidesClass
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
class Foo(object):
pass
obj = Foo()
directlyProvides(obj, IFoo)
self._callFUT(obj, IBar)
self.assertTrue(isinstance(obj.__provides__, ProvidesClass))
self.assertEqual(list(obj.__provides__), [IFoo, IBar])
class Test_noLongerProvides(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import noLongerProvides
return noLongerProvides(*args, **kw)
def test_wo_existing_provides(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
obj = Foo()
self._callFUT(obj, IFoo)
self.assertEqual(list(obj.__provides__), [])
def test_w_existing_provides_hit(self):
from zope.interface.declarations import directlyProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
obj = Foo()
directlyProvides(obj, IFoo)
self._callFUT(obj, IFoo)
self.assertEqual(list(obj.__provides__), [])
def test_w_existing_provides_miss(self):
from zope.interface.declarations import directlyProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
class Foo(object):
pass
obj = Foo()
directlyProvides(obj, IFoo)
self._callFUT(obj, IBar)
self.assertEqual(list(obj.__provides__), [IFoo])
def test_w_iface_implemented_by_class(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@implementer(IFoo)
class Foo(object):
pass
obj = Foo()
self.assertRaises(ValueError, self._callFUT, obj, IFoo)
class ClassProvidesBaseFallbackTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import ClassProvidesBaseFallback
return ClassProvidesBaseFallback
def _makeOne(self, klass, implements):
# Don't instantiate directly: the C version can't have attributes
# assigned.
class Derived(self._getTargetClass()):
def __init__(self, k, i):
self._cls = k
self._implements = i
return Derived(klass, implements)
def test_w_same_class_via_class(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo)
self.assertTrue(Foo.__provides__ is cpbp)
def test_w_same_class_via_instance(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
foo = Foo()
cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo)
self.assertTrue(foo.__provides__ is IFoo)
def test_w_different_class(self):
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
class Bar(Foo):
pass
bar = Bar()
cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo)
self.assertRaises(AttributeError, getattr, Bar, '__provides__')
self.assertRaises(AttributeError, getattr, bar, '__provides__')
class ClassProvidesBaseTests(ClassProvidesBaseFallbackTests):
# Repeat tests for C optimizations
def _getTargetClass(self):
from zope.interface.declarations import ClassProvidesBase
return ClassProvidesBase
class ClassProvidesTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import ClassProvides
return ClassProvides
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_w_simple_metaclass(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
@implementer(IFoo)
class Foo(object):
pass
cp = Foo.__provides__ = self._makeOne(Foo, type(Foo), IBar)
self.assertTrue(Foo.__provides__ is cp)
self.assertEqual(list(Foo().__provides__), [IFoo])
def test___reduce__(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
@implementer(IFoo)
class Foo(object):
pass
cp = Foo.__provides__ = self._makeOne(Foo, type(Foo), IBar)
self.assertEqual(cp.__reduce__(),
(self._getTargetClass(), (Foo, type(Foo), IBar)))
class Test_directlyProvidedBy(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import directlyProvidedBy
return directlyProvidedBy(*args, **kw)
def test_wo_declarations_in_class_or_instance(self):
class Foo(object):
pass
foo = Foo()
self.assertEqual(list(self._callFUT(foo)), [])
def test_w_declarations_in_class_but_not_instance(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@implementer(IFoo)
class Foo(object):
pass
foo = Foo()
self.assertEqual(list(self._callFUT(foo)), [])
def test_w_declarations_in_instance_but_not_class(self):
from zope.interface.declarations import directlyProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
foo = Foo()
directlyProvides(foo, IFoo)
self.assertEqual(list(self._callFUT(foo)), [IFoo])
def test_w_declarations_in_instance_and_class(self):
from zope.interface.declarations import directlyProvides
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
@implementer(IFoo)
class Foo(object):
pass
foo = Foo()
directlyProvides(foo, IBar)
self.assertEqual(list(self._callFUT(foo)), [IBar])
class Test_classProvides(unittest.TestCase, _Py3ClassAdvice):
def _getFUT(self):
from zope.interface.declarations import classProvides
return classProvides
def test_called_from_function(self):
import warnings
from zope.interface.declarations import classProvides
from zope.interface.interface import InterfaceClass
from zope.interface._compat import PYTHON3
IFoo = InterfaceClass("IFoo")
globs = {'classProvides': classProvides, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'def foo():',
' classProvides(IFoo)'
])
exec(CODE, globs, locs)
foo = locs['foo']
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
self.assertRaises(TypeError, foo)
if not PYTHON3:
self.assertEqual(len(log), 0) # no longer warn
def test_called_twice_from_class(self):
import warnings
from zope.interface.declarations import classProvides
from zope.interface.interface import InterfaceClass
from zope.interface._compat import PYTHON3
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
globs = {'classProvides': classProvides, 'IFoo': IFoo, 'IBar': IBar}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' classProvides(IFoo)',
' classProvides(IBar)',
])
with warnings.catch_warnings(record=True) as log:
warnings.resetwarnings()
try:
exec(CODE, globs, locs)
except TypeError:
if not PYTHON3:
self.assertEqual(len(log), 0) # no longer warn
else:
self.fail("Didn't raise TypeError")
def test_called_once_from_class(self):
from zope.interface.declarations import classProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'classProvides': classProvides, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' classProvides(IFoo)',
])
if self._run_generated_code(CODE, globs, locs):
Foo = locs['Foo']
spec = Foo.__providedBy__
self.assertEqual(list(spec), [IFoo])
# Test _classProvides_advice through classProvides, its only caller.
class Test_provider(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations import provider
return provider
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_w_class(self):
from zope.interface.declarations import ClassProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@self._makeOne(IFoo)
class Foo(object):
pass
self.assertTrue(isinstance(Foo.__provides__, ClassProvides))
self.assertEqual(list(Foo.__provides__), [IFoo])
class Test_moduleProvides(unittest.TestCase):
def _getFUT(self):
from zope.interface.declarations import moduleProvides
return moduleProvides
def test_called_from_function(self):
from zope.interface.declarations import moduleProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'__name__': 'zope.interface.tests.foo',
'moduleProvides': moduleProvides, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'def foo():',
' moduleProvides(IFoo)'
])
exec(CODE, globs, locs)
foo = locs['foo']
self.assertRaises(TypeError, foo)
def test_called_from_class(self):
from zope.interface.declarations import moduleProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'__name__': 'zope.interface.tests.foo',
'moduleProvides': moduleProvides, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'class Foo(object):',
' moduleProvides(IFoo)',
])
try:
exec(CODE, globs, locs)
except TypeError:
pass
else:
assert False, 'TypeError not raised'
def test_called_once_from_module_scope(self):
from zope.interface.declarations import moduleProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'__name__': 'zope.interface.tests.foo',
'moduleProvides': moduleProvides, 'IFoo': IFoo}
CODE = "\n".join([
'moduleProvides(IFoo)',
])
exec(CODE, globs)
spec = globs['__provides__']
self.assertEqual(list(spec), [IFoo])
def test_called_twice_from_module_scope(self):
from zope.interface.declarations import moduleProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
globs = {'__name__': 'zope.interface.tests.foo',
'moduleProvides': moduleProvides, 'IFoo': IFoo}
locs = {}
CODE = "\n".join([
'moduleProvides(IFoo)',
'moduleProvides(IFoo)',
])
try:
exec(CODE, globs)
except TypeError:
pass
else:
assert False, 'TypeError not raised'
class Test_getObjectSpecificationFallback(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import getObjectSpecificationFallback
return getObjectSpecificationFallback(*args, **kw)
def test_wo_existing_provides_classless(self):
the_dict = {}
class Foo(object):
def __getattribute__(self, name):
# Emulate object w/o any class
if name == '__class__':
raise AttributeError(name)
try:
return the_dict[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
the_dict[name] = value
foo = Foo()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [])
def test_existing_provides_is_spec(self):
from zope.interface.declarations import directlyProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
def foo():
pass
directlyProvides(foo, IFoo)
spec = self._callFUT(foo)
self.assertTrue(spec is foo.__provides__)
def test_existing_provides_is_not_spec(self):
def foo():
pass
foo.__provides__ = object() # not a valid spec
spec = self._callFUT(foo)
self.assertEqual(list(spec), [])
def test_existing_provides(self):
from zope.interface.declarations import directlyProvides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
foo = Foo()
directlyProvides(foo, IFoo)
spec = self._callFUT(foo)
self.assertEqual(list(spec), [IFoo])
def test_wo_provides_on_class_w_implements(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@implementer(IFoo)
class Foo(object):
pass
foo = Foo()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [IFoo])
def test_wo_provides_on_class_wo_implements(self):
class Foo(object):
pass
foo = Foo()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [])
class Test_getObjectSpecification(Test_getObjectSpecificationFallback):
# Repeat tests for C optimizations
def _callFUT(self, *args, **kw):
from zope.interface.declarations import getObjectSpecification
return getObjectSpecification(*args, **kw)
class Test_providedByFallback(unittest.TestCase):
def _callFUT(self, *args, **kw):
from zope.interface.declarations import providedByFallback
return providedByFallback(*args, **kw)
def test_wo_providedBy_on_class_wo_implements(self):
class Foo(object):
pass
foo = Foo()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [])
def test_w_providedBy_valid_spec(self):
from zope.interface.declarations import Provides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = Provides(Foo, IFoo)
spec = self._callFUT(foo)
self.assertEqual(list(spec), [IFoo])
def test_w_providedBy_invalid_spec(self):
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = object()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [])
def test_w_providedBy_invalid_spec_class_w_implements(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@implementer(IFoo)
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = object()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [IFoo])
def test_w_providedBy_invalid_spec_w_provides_no_provides_on_class(self):
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = object()
expected = foo.__provides__ = object()
spec = self._callFUT(foo)
self.assertTrue(spec is expected)
def test_w_providedBy_invalid_spec_w_provides_diff_provides_on_class(self):
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = object()
expected = foo.__provides__ = object()
Foo.__provides__ = object()
spec = self._callFUT(foo)
self.assertTrue(spec is expected)
def test_w_providedBy_invalid_spec_w_provides_same_provides_on_class(self):
from zope.interface.declarations import implementer
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
@implementer(IFoo)
class Foo(object):
pass
foo = Foo()
foo.__providedBy__ = object()
foo.__provides__ = Foo.__provides__ = object()
spec = self._callFUT(foo)
self.assertEqual(list(spec), [IFoo])
class Test_providedBy(Test_providedByFallback):
# Repeat tests for C optimizations
def _callFUT(self, *args, **kw):
from zope.interface.declarations import providedBy
return providedBy(*args, **kw)
class ObjectSpecificationDescriptorFallbackTests(unittest.TestCase):
def _getTargetClass(self):
from zope.interface.declarations \
import ObjectSpecificationDescriptorFallback
return ObjectSpecificationDescriptorFallback
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_accessed_via_class(self):
from zope.interface.declarations import Provides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
class Foo(object):
pass
Foo.__provides__ = Provides(Foo, IFoo)
Foo.__providedBy__ = self._makeOne()
self.assertEqual(list(Foo.__providedBy__), [IFoo])
def test_accessed_via_inst_wo_provides(self):
from zope.interface.declarations import implementer
from zope.interface.declarations import Provides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
@implementer(IFoo)
class Foo(object):
pass
Foo.__provides__ = Provides(Foo, IBar)
Foo.__providedBy__ = self._makeOne()
foo = Foo()
self.assertEqual(list(foo.__providedBy__), [IFoo])
def test_accessed_via_inst_w_provides(self):
from zope.interface.declarations import directlyProvides
from zope.interface.declarations import implementer
from zope.interface.declarations import Provides
from zope.interface.interface import InterfaceClass
IFoo = InterfaceClass("IFoo")
IBar = InterfaceClass("IBar")
IBaz = InterfaceClass("IBaz")
@implementer(IFoo)
class Foo(object):
pass
Foo.__provides__ = Provides(Foo, IBar)
Foo.__providedBy__ = self._makeOne()
foo = Foo()
directlyProvides(foo, IBaz)
self.assertEqual(list(foo.__providedBy__), [IBaz, IFoo])
class ObjectSpecificationDescriptorTests(
ObjectSpecificationDescriptorFallbackTests):
# Repeat tests for C optimizations
def _getTargetClass(self):
from zope.interface.declarations import ObjectSpecificationDescriptor
return ObjectSpecificationDescriptor
# Test _normalizeargs through its callers.
class _Monkey(object):
# context-manager for replacing module names in the scope of a test.
def __init__(self, module, **kw):
self.module = module
self.to_restore = dict([(key, getattr(module, key)) for key in kw])
for key, value in kw.items():
setattr(module, key, value)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
for key, value in self.to_restore.items():
setattr(self.module, key, value)
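# Illustrative usage sketch (not part of the original tests): _Monkey swaps
# the named module attributes for the duration of a with-block and restores
# them on exit, mirroring the call sites above (e.g.
# Test_Provides.test_no_cached_spec).
def _monkey_usage_sketch():  # pragma: no cover
    from zope.interface import declarations
    cache = {}
    with _Monkey(declarations, InstanceDeclarations=cache):
        # inside the block the module attribute is the replacement
        assert declarations.InstanceDeclarations is cache
    # on exit the original InstanceDeclarations mapping is back in place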
class _MonkeyDict(object):
# context-manager for restoring a dict within a module in the scope of a test.
def __init__(self, module, attrname, **kw):
self.module = module
self.target = getattr(module, attrname)
self.to_restore = self.target.copy()
self.target.clear()
self.target.update(kw)
def __enter__(self):
return self.target
def __exit__(self, exc_type, exc_val, exc_tb):
self.target.clear()
self.target.update(self.to_restore)
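# Illustrative usage sketch (not part of the original tests): _MonkeyDict
# empties a module-level registry for the duration of a with-block, hands
# the emptied dict to the caller, and restores the original contents on
# exit, as used in Test_implementedByFallback above.
def _monkeydict_usage_sketch():  # pragma: no cover
    from zope.interface import declarations
    with _MonkeyDict(declarations,
                     'BuiltinImplementationSpecifications') as specs:
        specs[tuple] = object()  # temporary registration, dropped on exit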
def test_suite():
return unittest.TestSuite((
unittest.makeSuite(DeclarationTests),
unittest.makeSuite(ImplementsTests),
unittest.makeSuite(Test_implementedByFallback),
unittest.makeSuite(Test_implementedBy),
unittest.makeSuite(Test_classImplementsOnly),
unittest.makeSuite(Test_classImplements),
unittest.makeSuite(Test__implements_advice),
unittest.makeSuite(Test_implementer),
unittest.makeSuite(Test_implementer_only),
unittest.makeSuite(Test_implements),
unittest.makeSuite(Test_implementsOnly),
unittest.makeSuite(ProvidesClassTests),
unittest.makeSuite(Test_Provides),
unittest.makeSuite(Test_directlyProvides),
unittest.makeSuite(Test_alsoProvides),
unittest.makeSuite(Test_noLongerProvides),
unittest.makeSuite(ClassProvidesBaseFallbackTests),
unittest.makeSuite(ClassProvidesBaseTests),
unittest.makeSuite(ClassProvidesTests),
unittest.makeSuite(Test_directlyProvidedBy),
unittest.makeSuite(Test_classProvides),
unittest.makeSuite(Test_provider),
unittest.makeSuite(Test_moduleProvides),
unittest.makeSuite(Test_getObjectSpecificationFallback),
unittest.makeSuite(Test_getObjectSpecification),
unittest.makeSuite(Test_providedByFallback),
unittest.makeSuite(Test_providedBy),
unittest.makeSuite(ObjectSpecificationDescriptorFallbackTests),
unittest.makeSuite(ObjectSpecificationDescriptorTests),
))
| gpl-3.0 | 1,732,145,450,677,383,700 | 7,497,677,258,134,437,000 | 35.298685 | 80 | 0.611085 | false |
yeming233/rally | tests/unit/plugins/openstack/scenarios/ironic/test_utils.py | 1 | 2990 | # Copyright 2015: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from rally.plugins.openstack.scenarios.ironic import utils
from tests.unit import test
IRONIC_UTILS = "rally.plugins.openstack.scenarios.ironic.utils"
class IronicScenarioTestCase(test.ScenarioTestCase):
@mock.patch("%s.utils.wait_for_status" % IRONIC_UTILS)
def test__create_node(self, mock_wait_for_status):
self.admin_clients("ironic").node.create.return_value = "fake_node"
scenario = utils.IronicScenario(self.context)
scenario.generate_random_name = mock.Mock()
scenario._create_node(driver="fake", properties="fake_prop",
fake_param="foo")
self.admin_clients("ironic").node.create.assert_called_once_with(
driver="fake", properties="fake_prop", fake_param="foo",
name=scenario.generate_random_name.return_value)
self.assertTrue(mock_wait_for_status.called)
self._test_atomic_action_timer(scenario.atomic_actions(),
"ironic.create_node")
@mock.patch("%s.utils.wait_for_status" % IRONIC_UTILS)
def test__delete_node(self, mock_wait_for_status):
mock_node_delete = mock.Mock()
self.admin_clients("ironic").node.delete = mock_node_delete
scenario = utils.IronicScenario(self.context)
scenario._delete_node(mock.Mock(uuid="fake_id"))
self.assertTrue(mock_wait_for_status.called)
self.admin_clients("ironic").node.delete.assert_called_once_with(
"fake_id")
self._test_atomic_action_timer(scenario.atomic_actions(),
"ironic.delete_node")
def test__list_nodes(self):
self.admin_clients("ironic").node.list.return_value = ["fake"]
scenario = utils.IronicScenario(self.context)
fake_params = {
"sort_dir": "foo1",
"associated": "foo2",
"detail": True,
"maintenance": "foo5"
}
return_nodes_list = scenario._list_nodes(**fake_params)
self.assertEqual(["fake"], return_nodes_list)
self.admin_clients("ironic").node.list.assert_called_once_with(
sort_dir="foo1", associated="foo2", detail=True,
maintenance="foo5")
self._test_atomic_action_timer(scenario.atomic_actions(),
"ironic.list_nodes")
| apache-2.0 | -4,579,240,053,569,828,400 | -4,539,077,028,192,236,500 | 41.714286 | 78 | 0.637458 | false |
netgroup/dreamer-ryu | ryu/ofproto/ofproto_v1_0_parser.py | 9 | 81719 | # Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011, 2012 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Decoder/Encoder implementations of OpenFlow 1.0.
"""
import struct
import binascii
from .ofproto_parser import StringifyMixin, MsgBase, msg_pack_into, msg_str_attr
from ryu.lib import addrconv
from ryu.lib import mac
from . import ofproto_parser
from . import ofproto_v1_0 as ofproto
from . import nx_match
from ryu import utils
import logging
LOG = logging.getLogger('ryu.ofproto.ofproto_v1_0_parser')
_MSG_PARSERS = {}
def _set_msg_type(msg_type):
'''Annotate corresponding OFP message type'''
def _set_cls_msg_type(cls):
cls.cls_msg_type = msg_type
return cls
return _set_cls_msg_type
def _register_parser(cls):
'''class decorator to register msg parser'''
assert cls.cls_msg_type is not None
assert cls.cls_msg_type not in _MSG_PARSERS
_MSG_PARSERS[cls.cls_msg_type] = cls.parser
return cls
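# Usage sketch (illustrative; the concrete message classes appear later in
# this module): the two decorators are stacked so that msg_parser() can
# dispatch on the OFP message type, e.g.
#
#     @_register_parser
#     @_set_msg_type(ofproto.OFPT_HELLO)
#     class OFPHello(MsgBase):
#         ...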
@ofproto_parser.register_msg_parser(ofproto.OFP_VERSION)
def msg_parser(datapath, version, msg_type, msg_len, xid, buf):
parser = _MSG_PARSERS.get(msg_type)
return parser(datapath, version, msg_type, msg_len, xid, buf)
# OFP_MSG_REPLY = {
# OFPFeaturesRequest: OFPSwitchFeatures,
# OFPBarrierRequest: OFPBarrierReply,
# OFPQueueGetConfigRequest: OFPQueueGetConfigReply,
#
# # ofp_stats_request -> ofp_stats_reply
# OFPDescStatsRequest: OFPDescStatsReply,
# OFPFlowStatsRequest: OFPFlowStatsReply,
# OFPAggregateStatsRequest: OFPAggregateStatsReply,
# OFPTableStatsRequest: OFPTableStatsReply,
# OFPPortStatsRequest: OFPPortStatsReply,
# OFPQueueStatsRequest: OFPQueueStatsReply,
# OFPVendorStatsRequest: OFPVendorStatsReply,
# }
def _set_msg_reply(msg_reply):
'''Annotate OFP reply message class'''
def _set_cls_msg_reply(cls):
cls.cls_msg_reply = msg_reply
return cls
return _set_cls_msg_reply
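# Usage sketch (illustrative): request classes are annotated with the class
# of the reply they elicit, matching the commented table above, e.g.
#
#     @_set_msg_reply(OFPSwitchFeatures)
#     @_set_msg_type(ofproto.OFPT_FEATURES_REQUEST)
#     class OFPFeaturesRequest(MsgBase):
#         ...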
#
# common structures
#
class OFPPhyPort(ofproto_parser.namedtuple('OFPPhyPort', (
'port_no', 'hw_addr', 'name', 'config', 'state', 'curr', 'advertised',
'supported', 'peer'))):
_TYPE = {
'ascii': [
'hw_addr',
],
'utf-8': [
# OF spec is unclear about the encoding of name.
# we assume UTF-8, which is used by OVS.
'name',
]
}
@classmethod
def parser(cls, buf, offset):
port = struct.unpack_from(ofproto.OFP_PHY_PORT_PACK_STR,
buf, offset)
port = list(port)
i = cls._fields.index('hw_addr')
port[i] = addrconv.mac.bin_to_text(port[i])
i = cls._fields.index('name')
port[i] = port[i].rstrip('\0')
return cls(*port)
class OFPMatch(StringifyMixin):
def __init__(self, wildcards=None, in_port=None, dl_src=None, dl_dst=None,
dl_vlan=None, dl_vlan_pcp=None, dl_type=None, nw_tos=None,
nw_proto=None, nw_src=None, nw_dst=None,
tp_src=None, tp_dst=None, nw_src_mask=32, nw_dst_mask=32):
super(OFPMatch, self).__init__()
wc = ofproto.OFPFW_ALL
if in_port is None:
self.in_port = 0
else:
wc &= ~ofproto.OFPFW_IN_PORT
self.in_port = in_port
if dl_src is None:
self.dl_src = mac.DONTCARE
else:
wc &= ~ofproto.OFPFW_DL_SRC
if dl_src == 0:
self.dl_src = mac.DONTCARE
else:
self.dl_src = dl_src
if dl_dst is None:
self.dl_dst = mac.DONTCARE
else:
wc &= ~ofproto.OFPFW_DL_DST
if dl_dst == 0:
self.dl_dst = mac.DONTCARE
else:
self.dl_dst = dl_dst
if dl_vlan is None:
self.dl_vlan = 0
else:
wc &= ~ofproto.OFPFW_DL_VLAN
self.dl_vlan = dl_vlan
if dl_vlan_pcp is None:
self.dl_vlan_pcp = 0
else:
wc &= ~ofproto.OFPFW_DL_VLAN_PCP
self.dl_vlan_pcp = dl_vlan_pcp
if dl_type is None:
self.dl_type = 0
else:
wc &= ~ofproto.OFPFW_DL_TYPE
self.dl_type = dl_type
if nw_tos is None:
self.nw_tos = 0
else:
wc &= ~ofproto.OFPFW_NW_TOS
self.nw_tos = nw_tos
if nw_proto is None:
self.nw_proto = 0
else:
wc &= ~ofproto.OFPFW_NW_PROTO
self.nw_proto = nw_proto
if nw_src is None:
self.nw_src = 0
else:
wc &= (32 - nw_src_mask) << ofproto.OFPFW_NW_SRC_SHIFT \
| ~ofproto.OFPFW_NW_SRC_MASK
self.nw_src = nw_src
if nw_dst is None:
self.nw_dst = 0
else:
wc &= (32 - nw_dst_mask) << ofproto.OFPFW_NW_DST_SHIFT \
| ~ofproto.OFPFW_NW_DST_MASK
self.nw_dst = nw_dst
if tp_src is None:
self.tp_src = 0
else:
wc &= ~ofproto.OFPFW_TP_SRC
self.tp_src = tp_src
if tp_dst is None:
self.tp_dst = 0
else:
wc &= ~ofproto.OFPFW_TP_DST
self.tp_dst = tp_dst
if wildcards is None:
self.wildcards = wc
else:
self.wildcards = wildcards
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_MATCH_PACK_STR, buf, offset,
self.wildcards, self.in_port, self.dl_src,
self.dl_dst, self.dl_vlan, self.dl_vlan_pcp,
self.dl_type, self.nw_tos, self.nw_proto,
self.nw_src, self.nw_dst, self.tp_src, self.tp_dst)
@classmethod
def parse(cls, buf, offset):
match = struct.unpack_from(ofproto.OFP_MATCH_PACK_STR,
buf, offset)
return cls(*match)
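# Minimal construction sketch (illustrative, not part of the original
# module): fields left as None stay wildcarded, so the match below only
# pins dl_type, nw_proto and nw_src.  nw_src is assumed to be a 32-bit
# integer (10.0.0.0 == 0x0a000000) matched with an 8-bit prefix mask.
def _match_sketch():  # pragma: no cover
    return OFPMatch(dl_type=0x0800, nw_proto=6,
                    nw_src=0x0a000000, nw_src_mask=8)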
class OFPActionHeader(StringifyMixin):
_base_attributes = ['type', 'len']
def __init__(self, type_, len_):
self.type = type_
self.len = len_
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_HEADER_PACK_STR,
buf, offset, self.type, self.len)
class OFPAction(OFPActionHeader):
_ACTION_TYPES = {}
@staticmethod
def register_action_type(type_, len_):
def _register_action_type(cls):
cls.cls_action_type = type_
cls.cls_action_len = len_
OFPAction._ACTION_TYPES[cls.cls_action_type] = cls
return cls
return _register_action_type
def __init__(self):
cls = self.__class__
super(OFPAction, self).__init__(cls.cls_action_type,
cls.cls_action_len)
@classmethod
def parser(cls, buf, offset):
type_, len_ = struct.unpack_from(
ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
cls_ = cls._ACTION_TYPES.get(type_)
assert cls_ is not None
return cls_.parser(buf, offset)
@OFPAction.register_action_type(ofproto.OFPAT_OUTPUT,
ofproto.OFP_ACTION_OUTPUT_SIZE)
class OFPActionOutput(OFPAction):
# NOTE: The reason for this magic number (0xffe5) is that
# there is no good constant in OF 1.0.  The same value as
# OFPCML_MAX of OF 1.2 and OF 1.3 is used.
def __init__(self, port, max_len=0xffe5):
super(OFPActionOutput, self).__init__()
self.port = port
self.max_len = max_len
@classmethod
def parser(cls, buf, offset):
type_, len_, port, max_len = struct.unpack_from(
ofproto.OFP_ACTION_OUTPUT_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_OUTPUT
assert len_ == ofproto.OFP_ACTION_OUTPUT_SIZE
return cls(port, max_len)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_OUTPUT_PACK_STR, buf,
offset, self.type, self.len, self.port, self.max_len)
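# Typical usage sketch (illustrative): forward a packet to the controller,
# relying on the default max_len discussed in the NOTE above.
def _action_output_sketch():  # pragma: no cover
    return OFPActionOutput(ofproto.OFPP_CONTROLLER)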
@OFPAction.register_action_type(ofproto.OFPAT_SET_VLAN_VID,
ofproto.OFP_ACTION_VLAN_VID_SIZE)
class OFPActionVlanVid(OFPAction):
def __init__(self, vlan_vid):
super(OFPActionVlanVid, self).__init__()
self.vlan_vid = vlan_vid
@classmethod
def parser(cls, buf, offset):
type_, len_, vlan_vid = struct.unpack_from(
ofproto.OFP_ACTION_VLAN_VID_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_SET_VLAN_VID
assert len_ == ofproto.OFP_ACTION_VLAN_VID_SIZE
return cls(vlan_vid)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_VLAN_VID_PACK_STR,
buf, offset, self.type, self.len, self.vlan_vid)
@OFPAction.register_action_type(ofproto.OFPAT_SET_VLAN_PCP,
ofproto.OFP_ACTION_VLAN_PCP_SIZE)
class OFPActionVlanPcp(OFPAction):
def __init__(self, vlan_pcp):
super(OFPActionVlanPcp, self).__init__()
self.vlan_pcp = vlan_pcp
@classmethod
def parser(cls, buf, offset):
type_, len_, vlan_pcp = struct.unpack_from(
ofproto.OFP_ACTION_VLAN_PCP_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_SET_VLAN_PCP
assert len_ == ofproto.OFP_ACTION_VLAN_PCP_SIZE
return cls(vlan_pcp)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_VLAN_PCP_PACK_STR,
buf, offset, self.type, self.len, self.vlan_pcp)
@OFPAction.register_action_type(ofproto.OFPAT_STRIP_VLAN,
ofproto.OFP_ACTION_HEADER_SIZE)
class OFPActionStripVlan(OFPAction):
def __init__(self):
super(OFPActionStripVlan, self).__init__()
@classmethod
def parser(cls, buf, offset):
type_, len_ = struct.unpack_from(
ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_STRIP_VLAN
assert len_ == ofproto.OFP_ACTION_HEADER_SIZE
return cls()
class OFPActionDlAddr(OFPAction):
def __init__(self, dl_addr):
super(OFPActionDlAddr, self).__init__()
self.dl_addr = dl_addr
@classmethod
def parser(cls, buf, offset):
type_, len_, dl_addr = struct.unpack_from(
ofproto.OFP_ACTION_DL_ADDR_PACK_STR, buf, offset)
assert type_ in (ofproto.OFPAT_SET_DL_SRC,
ofproto.OFPAT_SET_DL_DST)
assert len_ == ofproto.OFP_ACTION_DL_ADDR_SIZE
return cls(dl_addr)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_DL_ADDR_PACK_STR,
buf, offset, self.type, self.len, self.dl_addr)
@OFPAction.register_action_type(ofproto.OFPAT_SET_DL_SRC,
ofproto.OFP_ACTION_DL_ADDR_SIZE)
class OFPActionSetDlSrc(OFPActionDlAddr):
def __init__(self, dl_addr):
super(OFPActionSetDlSrc, self).__init__(dl_addr)
@OFPAction.register_action_type(ofproto.OFPAT_SET_DL_DST,
ofproto.OFP_ACTION_DL_ADDR_SIZE)
class OFPActionSetDlDst(OFPActionDlAddr):
def __init__(self, dl_addr):
super(OFPActionSetDlDst, self).__init__(dl_addr)
class OFPActionNwAddr(OFPAction):
def __init__(self, nw_addr):
super(OFPActionNwAddr, self).__init__()
self.nw_addr = nw_addr
@classmethod
def parser(cls, buf, offset):
type_, len_, nw_addr = struct.unpack_from(
ofproto.OFP_ACTION_NW_ADDR_PACK_STR, buf, offset)
assert type_ in (ofproto.OFPAT_SET_NW_SRC,
ofproto.OFPAT_SET_NW_DST)
assert len_ == ofproto.OFP_ACTION_NW_ADDR_SIZE
return cls(nw_addr)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_NW_ADDR_PACK_STR,
buf, offset, self.type, self.len, self.nw_addr)
@OFPAction.register_action_type(ofproto.OFPAT_SET_NW_SRC,
ofproto.OFP_ACTION_NW_ADDR_SIZE)
class OFPActionSetNwSrc(OFPActionNwAddr):
def __init__(self, nw_addr):
super(OFPActionSetNwSrc, self).__init__(nw_addr)
@OFPAction.register_action_type(ofproto.OFPAT_SET_NW_DST,
ofproto.OFP_ACTION_NW_ADDR_SIZE)
class OFPActionSetNwDst(OFPActionNwAddr):
def __init__(self, nw_addr):
super(OFPActionSetNwDst, self).__init__(nw_addr)
@OFPAction.register_action_type(ofproto.OFPAT_SET_NW_TOS,
ofproto.OFP_ACTION_NW_TOS_SIZE)
class OFPActionSetNwTos(OFPAction):
def __init__(self, tos):
super(OFPActionSetNwTos, self).__init__()
self.tos = tos
@classmethod
def parser(cls, buf, offset):
type_, len_, tos = struct.unpack_from(
ofproto.OFP_ACTION_NW_TOS_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_SET_NW_TOS
assert len_ == ofproto.OFP_ACTION_NW_TOS_SIZE
return cls(tos)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_NW_TOS_PACK_STR,
buf, offset, self.type, self.len, self.tos)
class OFPActionTpPort(OFPAction):
def __init__(self, tp):
super(OFPActionTpPort, self).__init__()
self.tp = tp
@classmethod
def parser(cls, buf, offset):
type_, len_, tp = struct.unpack_from(
ofproto.OFP_ACTION_TP_PORT_PACK_STR, buf, offset)
assert type_ in (ofproto.OFPAT_SET_TP_SRC,
ofproto.OFPAT_SET_TP_DST)
assert len_ == ofproto.OFP_ACTION_TP_PORT_SIZE
return cls(tp)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_TP_PORT_PACK_STR,
buf, offset, self.type, self.len, self.tp)
@OFPAction.register_action_type(ofproto.OFPAT_SET_TP_SRC,
ofproto.OFP_ACTION_TP_PORT_SIZE)
class OFPActionSetTpSrc(OFPActionTpPort):
def __init__(self, tp):
super(OFPActionSetTpSrc, self).__init__(tp)
@OFPAction.register_action_type(ofproto.OFPAT_SET_TP_DST,
ofproto.OFP_ACTION_TP_PORT_SIZE)
class OFPActionSetTpDst(OFPActionTpPort):
def __init__(self, tp):
super(OFPActionSetTpDst, self).__init__(tp)
@OFPAction.register_action_type(ofproto.OFPAT_ENQUEUE,
ofproto.OFP_ACTION_ENQUEUE_SIZE)
class OFPActionEnqueue(OFPAction):
def __init__(self, port, queue_id):
super(OFPActionEnqueue, self).__init__()
self.port = port
self.queue_id = queue_id
@classmethod
def parser(cls, buf, offset):
type_, len_, port, queue_id = struct.unpack_from(
ofproto.OFP_ACTION_ENQUEUE_PACK_STR, buf, offset)
assert type_ == ofproto.OFPAT_ENQUEUE
assert len_ == ofproto.OFP_ACTION_ENQUEUE_SIZE
return cls(port, queue_id)
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_ENQUEUE_PACK_STR, buf, offset,
self.type, self.len, self.port, self.queue_id)
@OFPAction.register_action_type(ofproto.OFPAT_VENDOR, 0)
class OFPActionVendor(OFPAction):
_ACTION_VENDORS = {}
@staticmethod
def register_action_vendor(vendor):
def _register_action_vendor(cls):
cls.cls_vendor = vendor
OFPActionVendor._ACTION_VENDORS[cls.cls_vendor] = cls
return cls
return _register_action_vendor
def __init__(self):
super(OFPActionVendor, self).__init__()
self.vendor = self.cls_vendor
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor = struct.unpack_from(
ofproto.OFP_ACTION_VENDOR_HEADER_PACK_STR, buf, offset)
cls_ = cls._ACTION_VENDORS.get(vendor)
return cls_.parser(buf, offset)
@OFPActionVendor.register_action_vendor(ofproto.NX_VENDOR_ID)
class NXActionHeader(OFPActionVendor):
_NX_ACTION_SUBTYPES = {}
@staticmethod
def register_nx_action_subtype(subtype, len_):
def _register_nx_action_subtype(cls):
cls.cls_action_len = len_
cls.cls_subtype = subtype
NXActionHeader._NX_ACTION_SUBTYPES[cls.cls_subtype] = cls
return cls
return _register_nx_action_subtype
def __init__(self):
super(NXActionHeader, self).__init__()
self.subtype = self.cls_subtype
def serialize(self, buf, offset):
msg_pack_into(ofproto.OFP_ACTION_HEADER_PACK_STR,
buf, offset, self.type, self.len)
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor, subtype = struct.unpack_from(
ofproto.NX_ACTION_HEADER_PACK_STR, buf, offset)
cls_ = cls._NX_ACTION_SUBTYPES.get(subtype)
return cls_.parser(buf, offset)
class NXActionResubmitBase(NXActionHeader):
def __init__(self, in_port, table):
super(NXActionResubmitBase, self).__init__()
assert self.subtype in (ofproto.NXAST_RESUBMIT,
ofproto.NXAST_RESUBMIT_TABLE)
self.in_port = in_port
self.table = table
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_RESUBMIT_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.in_port, self.table)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_RESUBMIT, ofproto.NX_ACTION_RESUBMIT_SIZE)
class NXActionResubmit(NXActionResubmitBase):
def __init__(self, in_port=ofproto.OFPP_IN_PORT):
super(NXActionResubmit, self).__init__(in_port, 0)
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor, subtype, in_port, table = struct.unpack_from(
ofproto.NX_ACTION_RESUBMIT_PACK_STR, buf, offset)
return cls(in_port)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_RESUBMIT_TABLE, ofproto.NX_ACTION_RESUBMIT_SIZE)
class NXActionResubmitTable(NXActionResubmitBase):
def __init__(self, in_port=ofproto.OFPP_IN_PORT, table=0xff):
super(NXActionResubmitTable, self).__init__(in_port, table)
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor, subtype, in_port, table = struct.unpack_from(
ofproto.NX_ACTION_RESUBMIT_PACK_STR, buf, offset)
return cls(in_port, table)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_SET_TUNNEL, ofproto.NX_ACTION_SET_TUNNEL_SIZE)
class NXActionSetTunnel(NXActionHeader):
def __init__(self, tun_id):
super(NXActionSetTunnel, self).__init__()
self.tun_id = tun_id
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_SET_TUNNEL_PACK_STR, buf,
offset, self.type, self.len, self.vendor, self.subtype,
self.tun_id)
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor, subtype, tun_id = struct.unpack_from(
ofproto.NX_ACTION_SET_TUNNEL_PACK_STR, buf, offset)
return cls(tun_id)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_SET_QUEUE, ofproto.NX_ACTION_SET_QUEUE_SIZE)
class NXActionSetQueue(NXActionHeader):
def __init__(self, queue_id):
super(NXActionSetQueue, self).__init__()
self.queue_id = queue_id
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_SET_QUEUE_PACK_STR, buf,
offset, self.type, self.len, self.vendor,
self.subtype, self.queue_id)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, queue_id) = struct.unpack_from(
ofproto.NX_ACTION_SET_QUEUE_PACK_STR, buf, offset)
return cls(queue_id)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_POP_QUEUE, ofproto.NX_ACTION_POP_QUEUE_SIZE)
class NXActionPopQueue(NXActionHeader):
def __init__(self):
super(NXActionPopQueue, self).__init__()
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_POP_QUEUE_PACK_STR, buf,
offset, self.type, self.len, self.vendor,
self.subtype)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype) = struct.unpack_from(
ofproto.NX_ACTION_POP_QUEUE_PACK_STR, buf, offset)
return cls()
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_REG_MOVE, ofproto.NX_ACTION_REG_MOVE_SIZE)
class NXActionRegMove(NXActionHeader):
def __init__(self, n_bits, src_ofs, dst_ofs, src, dst):
super(NXActionRegMove, self).__init__()
self.n_bits = n_bits
self.src_ofs = src_ofs
self.dst_ofs = dst_ofs
self.src = src
self.dst = dst
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_REG_MOVE_PACK_STR, buf,
offset, self.type, self.len, self.vendor,
self.subtype, self.n_bits, self.src_ofs, self.dst_ofs,
self.src, self.dst)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, n_bits, src_ofs, dst_ofs,
src, dst) = struct.unpack_from(
ofproto.NX_ACTION_REG_MOVE_PACK_STR, buf, offset)
return cls(n_bits, src_ofs, dst_ofs, src, dst)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_REG_LOAD, ofproto.NX_ACTION_REG_LOAD_SIZE)
class NXActionRegLoad(NXActionHeader):
def __init__(self, ofs_nbits, dst, value):
super(NXActionRegLoad, self).__init__()
self.ofs_nbits = ofs_nbits
self.dst = dst
self.value = value
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_REG_LOAD_PACK_STR, buf,
offset, self.type, self.len, self.vendor,
self.subtype, self.ofs_nbits, self.dst, self.value)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, ofs_nbits, dst,
value) = struct.unpack_from(
ofproto.NX_ACTION_REG_LOAD_PACK_STR, buf, offset)
return cls(ofs_nbits, dst, value)
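# Illustrative sketch, not part of the original module: NXAST_REG_LOAD packs
# the target bit range as ofs_nbits = (offset << 6) | (n_bits - 1).  The
# helper below mirrors that convention; `dst` is assumed to be the NXM
# header value of the destination register.
def _example_reg_load(dst):
    def ofs_nbits(ofs, n_bits):
        return (ofs << 6) | (n_bits - 1)
    # load the value 0x1 into the low 16 bits of dst (hypothetical values)
    return NXActionRegLoad(ofs_nbits(0, 16), dst, 0x1)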
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_SET_TUNNEL64, ofproto.NX_ACTION_SET_TUNNEL64_SIZE)
class NXActionSetTunnel64(NXActionHeader):
def __init__(self, tun_id):
super(NXActionSetTunnel64, self).__init__()
self.tun_id = tun_id
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_SET_TUNNEL64_PACK_STR, buf,
offset, self.type, self.len, self.vendor, self.subtype,
self.tun_id)
@classmethod
def parser(cls, buf, offset):
type_, len_, vendor, subtype, tun_id = struct.unpack_from(
ofproto.NX_ACTION_SET_TUNNEL64_PACK_STR, buf, offset)
return cls(tun_id)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_MULTIPATH, ofproto.NX_ACTION_MULTIPATH_SIZE)
class NXActionMultipath(NXActionHeader):
def __init__(self, fields, basis, algorithm, max_link, arg,
ofs_nbits, dst):
super(NXActionMultipath, self).__init__()
self.fields = fields
self.basis = basis
self.algorithm = algorithm
self.max_link = max_link
self.arg = arg
self.ofs_nbits = ofs_nbits
self.dst = dst
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_MULTIPATH_PACK_STR, buf,
offset, self.type, self.len, self.vendor, self.subtype,
self.fields, self.basis, self.algorithm, self.max_link,
self.arg, self.ofs_nbits, self.dst)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, fields, basis, algorithm,
max_link, arg, ofs_nbits, dst) = struct.unpack_from(
ofproto.NX_ACTION_MULTIPATH_PACK_STR, buf, offset)
return cls(fields, basis, algorithm, max_link, arg, ofs_nbits,
dst)
@NXActionHeader.register_nx_action_subtype(ofproto.NXAST_NOTE, 0)
class NXActionNote(NXActionHeader):
def __init__(self, note):
super(NXActionNote, self).__init__()
# should check here if the note is valid (only hex values)
pad = (len(note) + 10) % 8
if pad:
note += [0x0 for i in range(8 - pad)]
self.note = note
self.len = len(note) + 10
def serialize(self, buf, offset):
note = self.note
extra = None
extra_len = len(self.note) - 6
if extra_len > 0:
extra = note[6:]
note = note[0:6]
msg_pack_into(ofproto.NX_ACTION_NOTE_PACK_STR, buf,
offset, self.type, self.len, self.vendor, self.subtype,
*note)
if extra_len > 0:
msg_pack_into('B' * extra_len, buf,
offset + ofproto.NX_ACTION_NOTE_SIZE,
*extra)
@classmethod
def parser(cls, buf, offset):
note = struct.unpack_from(
ofproto.NX_ACTION_NOTE_PACK_STR, buf, offset)
(type_, len_, vendor, subtype) = note[0:4]
note = [i for i in note[4:]]
if len_ > ofproto.NX_ACTION_NOTE_SIZE:
note_start = offset + ofproto.NX_ACTION_NOTE_SIZE
note_end = note_start + len_ - ofproto.NX_ACTION_NOTE_SIZE
note += [int(binascii.b2a_hex(i), 16) for i
in buf[note_start:note_end]]
return cls(note)
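# Illustrative sketch, not part of the original module: a note action
# round-trip.  __init__ pads the byte list so the total action length is a
# multiple of 8; parser() recovers the padded note.
def _example_note_roundtrip():
    note = NXActionNote([0xde, 0xad, 0xbe, 0xef])
    buf = bytearray(note.len)
    note.serialize(buf, 0)
    return NXActionHeader.parser(str(buf), 0)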
class NXActionBundleBase(NXActionHeader):
def __init__(self, algorithm, fields, basis, slave_type, n_slaves,
ofs_nbits, dst, slaves):
super(NXActionBundleBase, self).__init__()
_len = ofproto.NX_ACTION_BUNDLE_SIZE + len(slaves) * 2
        # pad the total action length up to the next 8-byte boundary
        _len += (8 - _len % 8) % 8
self.len = _len
self.algorithm = algorithm
self.fields = fields
self.basis = basis
self.slave_type = slave_type
self.n_slaves = n_slaves
self.ofs_nbits = ofs_nbits
self.dst = dst
self.slaves = slaves
def serialize(self, buf, offset):
slave_offset = offset + ofproto.NX_ACTION_BUNDLE_SIZE
for s in self.slaves:
msg_pack_into('!H', buf, slave_offset, s)
slave_offset += 2
        pad_len = (8 - (len(self.slaves) * 2 +
                        ofproto.NX_ACTION_BUNDLE_SIZE) % 8) % 8
if pad_len != 0:
msg_pack_into('%dx' % pad_len, buf, slave_offset)
msg_pack_into(ofproto.NX_ACTION_BUNDLE_PACK_STR, buf,
offset, self.type, self.len, self.vendor, self.subtype,
self.algorithm, self.fields, self.basis,
self.slave_type, self.n_slaves,
self.ofs_nbits, self.dst)
@classmethod
def parser(cls, action_cls, buf, offset):
(type_, len_, vendor, subtype, algorithm, fields, basis,
slave_type, n_slaves, ofs_nbits, dst) = struct.unpack_from(
ofproto.NX_ACTION_BUNDLE_PACK_STR, buf, offset)
slave_offset = offset + ofproto.NX_ACTION_BUNDLE_SIZE
slaves = []
for i in range(0, n_slaves):
s = struct.unpack_from('!H', buf, slave_offset)
slaves.append(s[0])
slave_offset += 2
return action_cls(algorithm, fields, basis, slave_type,
n_slaves, ofs_nbits, dst, slaves)
@NXActionHeader.register_nx_action_subtype(ofproto.NXAST_BUNDLE, 0)
class NXActionBundle(NXActionBundleBase):
def __init__(self, algorithm, fields, basis, slave_type, n_slaves,
ofs_nbits, dst, slaves):
super(NXActionBundle, self).__init__(
algorithm, fields, basis, slave_type, n_slaves,
ofs_nbits, dst, slaves)
@classmethod
def parser(cls, buf, offset):
return NXActionBundleBase.parser(NXActionBundle, buf, offset)
@NXActionHeader.register_nx_action_subtype(ofproto.NXAST_BUNDLE_LOAD, 0)
class NXActionBundleLoad(NXActionBundleBase):
def __init__(self, algorithm, fields, basis, slave_type, n_slaves,
ofs_nbits, dst, slaves):
super(NXActionBundleLoad, self).__init__(
algorithm, fields, basis, slave_type, n_slaves,
ofs_nbits, dst, slaves)
@classmethod
def parser(cls, buf, offset):
return NXActionBundleBase.parser(NXActionBundleLoad, buf, offset)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_AUTOPATH, ofproto.NX_ACTION_AUTOPATH_SIZE)
class NXActionAutopath(NXActionHeader):
def __init__(self, ofs_nbits, dst, id_):
super(NXActionAutopath, self).__init__()
self.ofs_nbits = ofs_nbits
self.dst = dst
self.id = id_
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_AUTOPATH_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.ofs_nbits, self.dst, self.id)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, ofs_nbits, dst,
id_) = struct.unpack_from(
ofproto.NX_ACTION_AUTOPATH_PACK_STR, buf, offset)
return cls(ofs_nbits, dst, id_)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_OUTPUT_REG, ofproto.NX_ACTION_OUTPUT_REG_SIZE)
class NXActionOutputReg(NXActionHeader):
def __init__(self, ofs_nbits, src, max_len):
super(NXActionOutputReg, self).__init__()
self.ofs_nbits = ofs_nbits
self.src = src
self.max_len = max_len
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_OUTPUT_REG_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.ofs_nbits, self.src, self.max_len)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, ofs_nbits, src,
max_len) = struct.unpack_from(
ofproto.NX_ACTION_OUTPUT_REG_PACK_STR, buf, offset)
return cls(ofs_nbits, src, max_len)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_EXIT, ofproto.NX_ACTION_HEADER_SIZE)
class NXActionExit(NXActionHeader):
def __init__(self):
super(NXActionExit, self).__init__()
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_HEADER_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype) = struct.unpack_from(
ofproto.NX_ACTION_HEADER_PACK_STR, buf, offset)
return cls()
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_DEC_TTL, ofproto.NX_ACTION_HEADER_SIZE)
class NXActionDecTtl(NXActionHeader):
def __init__(self):
super(NXActionDecTtl, self).__init__()
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_HEADER_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype) = struct.unpack_from(
ofproto.NX_ACTION_HEADER_PACK_STR, buf, offset)
return cls()
@NXActionHeader.register_nx_action_subtype(ofproto.NXAST_LEARN, 0)
class NXActionLearn(NXActionHeader):
def __init__(self, idle_timeout, hard_timeout, priority, cookie, flags,
table_id, fin_idle_timeout, fin_hard_timeout, spec):
super(NXActionLearn, self).__init__()
len_ = len(spec) + ofproto.NX_ACTION_LEARN_SIZE
        pad_len = (8 - len_ % 8) % 8
self.len = len_ + pad_len
self.idle_timeout = idle_timeout
self.hard_timeout = hard_timeout
self.priority = priority
self.cookie = cookie
self.flags = flags
self.table_id = table_id
self.fin_idle_timeout = fin_idle_timeout
self.fin_hard_timeout = fin_hard_timeout
self.spec = spec + bytearray('\x00' * pad_len)
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_LEARN_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.idle_timeout, self.hard_timeout, self.priority,
self.cookie, self.flags, self.table_id,
self.fin_idle_timeout, self.fin_hard_timeout)
buf += self.spec
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, idle_timeout, hard_timeout, priority,
cookie, flags, table_id, fin_idle_timeout,
fin_hard_timeout) = struct.unpack_from(
ofproto.NX_ACTION_LEARN_PACK_STR, buf, offset)
spec = buf[offset + ofproto.NX_ACTION_LEARN_SIZE:]
return cls(idle_timeout, hard_timeout, priority,
cookie, flags, table_id, fin_idle_timeout,
fin_hard_timeout, spec)
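# Illustrative sketch, not part of the original module: the total length of
# a learn action is the fixed NX_ACTION_LEARN_SIZE header plus the flow-mod
# spec, rounded up to an 8-byte boundary, matching __init__ above.
def _example_learn_action_length(spec):
    length = len(spec) + ofproto.NX_ACTION_LEARN_SIZE
    return length + (-length % 8)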
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_CONTROLLER, ofproto.NX_ACTION_CONTROLLER_SIZE)
class NXActionController(NXActionHeader):
def __init__(self, max_len, controller_id, reason):
super(NXActionController, self).__init__()
self.max_len = max_len
self.controller_id = controller_id
self.reason = reason
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_CONTROLLER_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.max_len, self.controller_id, self.reason, 0)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, max_len, controller_id, reason,
_zero) = struct.unpack_from(
ofproto.NX_ACTION_CONTROLLER_PACK_STR, buf, offset)
return cls(max_len, controller_id, reason)
@NXActionHeader.register_nx_action_subtype(
ofproto.NXAST_FIN_TIMEOUT, ofproto.NX_ACTION_FIN_TIMEOUT_SIZE)
class NXActionFinTimeout(NXActionHeader):
def __init__(self, fin_idle_timeout, fin_hard_timeout):
super(NXActionFinTimeout, self).__init__()
self.fin_idle_timeout = fin_idle_timeout
self.fin_hard_timeout = fin_hard_timeout
def serialize(self, buf, offset):
msg_pack_into(ofproto.NX_ACTION_FIN_TIMEOUT_PACK_STR, buf, offset,
self.type, self.len, self.vendor, self.subtype,
self.fin_idle_timeout, self.fin_hard_timeout)
@classmethod
def parser(cls, buf, offset):
(type_, len_, vendor, subtype, fin_idle_timeout,
fin_hard_timeout) = struct.unpack_from(
ofproto.NX_ACTION_FIN_TIMEOUT_PACK_STR, buf, offset)
return cls(fin_idle_timeout, fin_hard_timeout)
class OFPDescStats(ofproto_parser.namedtuple('OFPDescStats', (
'mfr_desc', 'hw_desc', 'sw_desc', 'serial_num', 'dp_desc'))):
_TYPE = {
'ascii': [
'mfr_desc',
'hw_desc',
'sw_desc',
'serial_num',
'dp_desc',
]
}
@classmethod
def parser(cls, buf, offset):
desc = struct.unpack_from(ofproto.OFP_DESC_STATS_PACK_STR,
buf, offset)
        desc = [s.rstrip('\0') for s in desc]
stats = cls(*desc)
stats.length = ofproto.OFP_DESC_STATS_SIZE
return stats
class OFPFlowStats(StringifyMixin):
def __init__(self):
super(OFPFlowStats, self).__init__()
self.length = None
self.table_id = None
self.match = None
self.duration_sec = None
self.duration_nsec = None
self.priority = None
self.idle_timeout = None
self.hard_timeout = None
self.cookie = None
self.packet_count = None
self.byte_count = None
self.actions = None
@classmethod
def parser(cls, buf, offset):
flow_stats = cls()
flow_stats.length, flow_stats.table_id = struct.unpack_from(
ofproto.OFP_FLOW_STATS_0_PACK_STR, buf, offset)
offset += ofproto.OFP_FLOW_STATS_0_SIZE
flow_stats.match = OFPMatch.parse(buf, offset)
offset += ofproto.OFP_MATCH_SIZE
(flow_stats.duration_sec,
flow_stats.duration_nsec,
flow_stats.priority,
flow_stats.idle_timeout,
flow_stats.hard_timeout,
flow_stats.cookie,
flow_stats.packet_count,
flow_stats.byte_count) = struct.unpack_from(
ofproto.OFP_FLOW_STATS_1_PACK_STR, buf, offset)
offset += ofproto.OFP_FLOW_STATS_1_SIZE
flow_stats.actions = []
length = ofproto.OFP_FLOW_STATS_SIZE
while length < flow_stats.length:
action = OFPAction.parser(buf, offset)
flow_stats.actions.append(action)
offset += action.len
length += action.len
return flow_stats
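# Illustrative sketch, not part of the original module: the parser above
# follows a pattern used throughout this file -- read a fixed-size prefix to
# learn the entry length, then walk the trailing variable-length actions
# until that length is consumed.  A reply body holding several entries can
# be drained the same way:
def _example_walk_flow_stats(buf, msg_len, offset):
    entries = []
    while offset < msg_len:
        stats = OFPFlowStats.parser(buf, offset)
        entries.append(stats)
        offset += stats.length
    return entries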
class OFPAggregateStats(ofproto_parser.namedtuple('OFPAggregateStats', (
'packet_count', 'byte_count', 'flow_count'))):
@classmethod
def parser(cls, buf, offset):
agg = struct.unpack_from(
ofproto.OFP_AGGREGATE_STATS_REPLY_PACK_STR, buf, offset)
stats = cls(*agg)
stats.length = ofproto.OFP_AGGREGATE_STATS_REPLY_SIZE
return stats
class OFPTableStats(ofproto_parser.namedtuple('OFPTableStats', (
'table_id', 'name', 'wildcards', 'max_entries', 'active_count',
'lookup_count', 'matched_count'))):
_TYPE = {
'utf-8': [
# OF spec is unclear about the encoding of name.
            # we assume UTF-8.
'name',
]
}
@classmethod
def parser(cls, buf, offset):
tbl = struct.unpack_from(ofproto.OFP_TABLE_STATS_PACK_STR,
buf, offset)
tbl = list(tbl)
i = cls._fields.index('name')
tbl[i] = tbl[i].rstrip('\0')
stats = cls(*tbl)
stats.length = ofproto.OFP_TABLE_STATS_SIZE
return stats
class OFPPortStats(ofproto_parser.namedtuple('OFPPortStats', (
'port_no', 'rx_packets', 'tx_packets', 'rx_bytes', 'tx_bytes',
'rx_dropped', 'tx_dropped', 'rx_errors', 'tx_errors',
'rx_frame_err', 'rx_over_err', 'rx_crc_err', 'collisions'))):
@classmethod
def parser(cls, buf, offset):
port = struct.unpack_from(ofproto.OFP_PORT_STATS_PACK_STR,
buf, offset)
stats = cls(*port)
stats.length = ofproto.OFP_PORT_STATS_SIZE
return stats
class OFPQueueStats(ofproto_parser.namedtuple('OFPQueueStats', (
'port_no', 'queue_id', 'tx_bytes', 'tx_packets', 'tx_errors'))):
@classmethod
def parser(cls, buf, offset):
queue = struct.unpack_from(ofproto.OFP_QUEUE_STATS_PACK_STR,
buf, offset)
stats = cls(*queue)
stats.length = ofproto.OFP_QUEUE_STATS_SIZE
return stats
class OFPVendorStats(ofproto_parser.namedtuple('OFPVendorStats',
('specific_data'))):
@classmethod
def parser(cls, buf, offset):
stats = cls(buf[offset:])
stats.length = len(stats.specific_data)
return stats
class NXFlowStats(StringifyMixin):
def __init__(self):
super(NXFlowStats, self).__init__()
self.length = None
self.table_id = None
self.duration_sec = None
self.duration_nsec = None
self.priority = None
self.idle_timeout = None
self.hard_timeout = None
self.match_len = None
self.idle_age = None
self.hard_age = None
self.cookie = None
self.packet_count = None
self.byte_count = None
@classmethod
def parser(cls, buf, offset):
original_offset = offset
nxflow_stats = cls()
(nxflow_stats.length, nxflow_stats.table_id,
nxflow_stats.duration_sec, nxflow_stats.duration_nsec,
nxflow_stats.priority, nxflow_stats.idle_timeout,
nxflow_stats.hard_timeout, nxflow_stats.match_len,
nxflow_stats.idle_age, nxflow_stats.hard_age,
nxflow_stats.cookie, nxflow_stats.packet_count,
nxflow_stats.byte_count) = struct.unpack_from(
ofproto.NX_FLOW_STATS_PACK_STR, buf, offset)
offset += ofproto.NX_FLOW_STATS_SIZE
fields = []
match_len = nxflow_stats.match_len
match_len -= 4
while match_len > 0:
field = nx_match.MFField.parser(buf, offset)
offset += field.length
match_len -= field.length
fields.append(field)
nxflow_stats.fields = fields
actions = []
total_len = original_offset + nxflow_stats.length
match_len = nxflow_stats.match_len
offset += utils.round_up(match_len, 8) - match_len
while offset < total_len:
action = OFPAction.parser(buf, offset)
actions.append(action)
offset += action.len
nxflow_stats.actions = actions
return nxflow_stats
class NXAggregateStats(ofproto_parser.namedtuple('NXAggregateStats', (
'packet_count', 'byte_count', 'flow_count'))):
@classmethod
def parser(cls, buf, offset):
agg = struct.unpack_from(
ofproto.NX_AGGREGATE_STATS_REPLY_PACK_STR, buf, offset)
stats = cls(*agg)
stats.length = ofproto.NX_AGGREGATE_STATS_REPLY_SIZE
return stats
class OFPQueuePropHeader(StringifyMixin):
_QUEUE_PROPERTIES = {}
@staticmethod
def register_queue_property(prop_type, prop_len):
        def _register_queue_property(cls):
cls.cls_prop_type = prop_type
cls.cls_prop_len = prop_len
OFPQueuePropHeader._QUEUE_PROPERTIES[prop_type] = cls
return cls
        return _register_queue_property
def __init__(self):
self.property = self.cls_prop_type
self.len = self.cls_prop_len
@classmethod
def parser(cls, buf, offset):
property_, len_ = struct.unpack_from(
ofproto.OFP_QUEUE_PROP_HEADER_PACK_STR, buf, offset)
prop_cls = cls._QUEUE_PROPERTIES[property_]
assert property_ == prop_cls.cls_prop_type
assert len_ == prop_cls.cls_prop_len
offset += ofproto.OFP_QUEUE_PROP_HEADER_SIZE
return prop_cls.parser(buf, offset)
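# Illustrative sketch, not part of the original module: the registry above
# dispatches on the property type found in the wire header, so callers never
# name the concrete class:
#
#   prop = OFPQueuePropHeader.parser(buf, offset)
#   if prop.property == ofproto.OFPQT_MIN_RATE:
#       rate = prop.rate    # in 1/10 of a percent, per the OF1.0 spec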
@OFPQueuePropHeader.register_queue_property(
ofproto.OFPQT_NONE, ofproto.OFP_QUEUE_PROP_HEADER_SIZE)
class OFPQueuePropNone(OFPQueuePropHeader):
def __init__(self):
super(OFPQueuePropNone, self).__init__()
@classmethod
def parser(cls, buf, offset):
return cls()
@OFPQueuePropHeader.register_queue_property(
ofproto.OFPQT_MIN_RATE, ofproto.OFP_QUEUE_PROP_MIN_RATE_SIZE)
class OFPQueuePropMinRate(OFPQueuePropHeader):
def __init__(self, rate):
super(OFPQueuePropMinRate, self).__init__()
self.rate = rate
@classmethod
def parser(cls, buf, offset):
(rate,) = struct.unpack_from(
ofproto.OFP_QUEUE_PROP_MIN_RATE_PACK_STR,
buf, offset)
return cls(rate)
class OFPPacketQueue(StringifyMixin):
def __init__(self, queue_id, len_):
self.queue_id = queue_id
self.len = len_
self.properties = None
@classmethod
def parser(cls, buf, offset):
queue_id, len_ = struct.unpack_from(
ofproto.OFP_PACKET_QUEUE_PQCK_STR, buf, offset)
packet_queue = cls(queue_id, len_)
packet_queue.properties = []
cur_len = ofproto.OFP_PACKET_QUEUE_SIZE
offset += ofproto.OFP_PACKET_QUEUE_SIZE
while (cur_len + ofproto.OFP_QUEUE_PROP_HEADER_SIZE <=
packet_queue.len):
prop = OFPQueuePropHeader.parser(buf, offset)
packet_queue.properties.append(prop)
cur_len += prop.len
offset += prop.len
return packet_queue
#
# Symmetric messages
# parser + serializer
#
@_register_parser
@_set_msg_type(ofproto.OFPT_HELLO)
class OFPHello(MsgBase):
def __init__(self, datapath):
super(OFPHello, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_ERROR)
class OFPErrorMsg(MsgBase):
def __init__(self, datapath, type_=None, code=None, data=None):
super(OFPErrorMsg, self).__init__(datapath)
self.type = type_
self.code = code
self.data = data
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPErrorMsg, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
msg.type, msg.code = struct.unpack_from(
ofproto.OFP_ERROR_MSG_PACK_STR, msg.buf,
ofproto.OFP_HEADER_SIZE)
msg.data = msg.buf[ofproto.OFP_ERROR_MSG_SIZE:]
return msg
def _serialize_body(self):
assert self.data is not None
msg_pack_into(ofproto.OFP_ERROR_MSG_PACK_STR, self.buf,
ofproto.OFP_HEADER_SIZE, self.type, self.code)
self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_ECHO_REQUEST)
class OFPEchoRequest(MsgBase):
def __init__(self, datapath, data=None):
super(OFPEchoRequest, self).__init__(datapath)
self.data = data
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPEchoRequest, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
msg.data = msg.buf[ofproto.OFP_HEADER_SIZE:]
return msg
def _serialize_body(self):
if self.data is not None:
self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_ECHO_REPLY)
class OFPEchoReply(MsgBase):
def __init__(self, datapath, data=None):
super(OFPEchoReply, self).__init__(datapath)
self.data = data
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPEchoReply, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
msg.data = msg.buf[ofproto.OFP_HEADER_SIZE:]
return msg
def _serialize_body(self):
assert self.data is not None
self.buf += self.data
@_register_parser
@_set_msg_type(ofproto.OFPT_VENDOR)
class OFPVendor(MsgBase):
_VENDORS = {}
@staticmethod
def register_vendor(id_):
def _register_vendor(cls):
OFPVendor._VENDORS[id_] = cls
return cls
return _register_vendor
def __init__(self, datapath):
super(OFPVendor, self).__init__(datapath)
self.data = None
self.vendor = None
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPVendor, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
(msg.vendor,) = struct.unpack_from(
ofproto.OFP_VENDOR_HEADER_PACK_STR, msg.buf,
ofproto.OFP_HEADER_SIZE)
cls_ = cls._VENDORS.get(msg.vendor)
if cls_:
msg.data = cls_.parser(datapath, msg.buf, 0)
else:
msg.data = msg.buf[ofproto.OFP_VENDOR_HEADER_SIZE:]
return msg
def serialize_header(self):
msg_pack_into(ofproto.OFP_VENDOR_HEADER_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE, self.vendor)
def _serialize_body(self):
assert self.data is not None
self.serialize_header()
self.buf += self.data
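# Illustrative sketch, not part of the original module: third-party vendor
# messages hook into OFPVendor the same way NiciraHeader does below.  The
# vendor id 0x00002320 and the class name are hypothetical.
#
#   @OFPVendor.register_vendor(0x00002320)
#   class MyVendorHeader(OFPVendor):
#       @classmethod
#       def parser(cls, datapath, buf, offset):
#           ...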
@OFPVendor.register_vendor(ofproto.NX_VENDOR_ID)
class NiciraHeader(OFPVendor):
_NX_SUBTYPES = {}
@staticmethod
def register_nx_subtype(subtype):
def _register_nx_subtype(cls):
cls.cls_subtype = subtype
NiciraHeader._NX_SUBTYPES[cls.cls_subtype] = cls
return cls
return _register_nx_subtype
def __init__(self, datapath, subtype):
super(NiciraHeader, self).__init__(datapath)
self.vendor = ofproto.NX_VENDOR_ID
self.subtype = subtype
def serialize_header(self):
super(NiciraHeader, self).serialize_header()
msg_pack_into(ofproto.NICIRA_HEADER_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE,
self.vendor, self.subtype)
@classmethod
def parser(cls, datapath, buf, offset):
vendor, subtype = struct.unpack_from(
ofproto.NICIRA_HEADER_PACK_STR, buf,
offset + ofproto.OFP_HEADER_SIZE)
cls_ = cls._NX_SUBTYPES.get(subtype)
return cls_.parser(datapath, buf,
offset + ofproto.NICIRA_HEADER_SIZE)
class NXTSetFlowFormat(NiciraHeader):
def __init__(self, datapath, flow_format):
super(NXTSetFlowFormat, self).__init__(
datapath, ofproto.NXT_SET_FLOW_FORMAT)
self.format = flow_format
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_SET_FLOW_FORMAT_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE, self.format)
class NXTFlowMod(NiciraHeader):
def __init__(self, datapath, cookie, command,
idle_timeout=0, hard_timeout=0,
priority=ofproto.OFP_DEFAULT_PRIORITY,
buffer_id=0xffffffff, out_port=ofproto.OFPP_NONE,
flags=0, rule=None, actions=None):
        # The rule argument is placed second to last to mirror the layout of
        # struct nxt_flow_mod.  A rule must always be supplied; it defaults
        # to None only so that the arguments before it can keep their own
        # default values.
assert rule is not None
if actions is None:
actions = []
super(NXTFlowMod, self).__init__(datapath, ofproto.NXT_FLOW_MOD)
self.cookie = cookie
self.command = command
self.idle_timeout = idle_timeout
self.hard_timeout = hard_timeout
self.priority = priority
self.buffer_id = buffer_id
self.out_port = out_port
self.flags = flags
self.rule = rule
self.actions = actions
def _serialize_body(self):
self.serialize_header()
offset = ofproto.NX_FLOW_MOD_SIZE
match_len = nx_match.serialize_nxm_match(self.rule, self.buf, offset)
offset += nx_match.round_up(match_len)
msg_pack_into(ofproto.NX_FLOW_MOD_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE,
self.cookie, self.command, self.idle_timeout,
self.hard_timeout, self.priority, self.buffer_id,
self.out_port, self.flags, match_len)
if self.actions is not None:
for a in self.actions:
a.serialize(self.buf, offset)
offset += a.len
class NXTRoleRequest(NiciraHeader):
def __init__(self, datapath, role):
super(NXTRoleRequest, self).__init__(
datapath, ofproto.NXT_ROLE_REQUEST)
self.role = role
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_ROLE_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE, self.role)
@NiciraHeader.register_nx_subtype(ofproto.NXT_ROLE_REPLY)
class NXTRoleReply(NiciraHeader):
def __init__(self, datapath, role):
super(NXTRoleReply, self).__init__(
datapath, ofproto.NXT_ROLE_REPLY)
self.role = role
@classmethod
def parser(cls, datapath, buf, offset):
(role,) = struct.unpack_from(
ofproto.NX_ROLE_PACK_STR, buf, offset)
return cls(datapath, role)
class NXTFlowModTableId(NiciraHeader):
def __init__(self, datapath, set_):
super(NXTFlowModTableId, self).__init__(
datapath, ofproto.NXT_FLOW_MOD_TABLE_ID)
self.set = set_
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_FLOW_MOD_TABLE_ID_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE,
self.set)
@NiciraHeader.register_nx_subtype(ofproto.NXT_FLOW_REMOVED)
class NXTFlowRemoved(NiciraHeader):
def __init__(self, datapath, cookie, priority, reason,
duration_sec, duration_nsec, idle_timeout, match_len,
packet_count, byte_count, match):
super(NXTFlowRemoved, self).__init__(
datapath, ofproto.NXT_FLOW_REMOVED)
self.cookie = cookie
self.priority = priority
self.reason = reason
self.duration_sec = duration_sec
self.duration_nsec = duration_nsec
self.idle_timeout = idle_timeout
self.match_len = match_len
self.packet_count = packet_count
self.byte_count = byte_count
self.match = match
@classmethod
def parser(cls, datapath, buf, offset):
(cookie, priority, reason, duration_sec, duration_nsec,
idle_timeout, match_len,
packet_count, byte_count) = struct.unpack_from(
ofproto.NX_FLOW_REMOVED_PACK_STR, buf, offset)
offset += (ofproto.NX_FLOW_REMOVED_SIZE
- ofproto.NICIRA_HEADER_SIZE)
match = nx_match.NXMatch.parser(buf, offset, match_len)
return cls(datapath, cookie, priority, reason, duration_sec,
duration_nsec, idle_timeout, match_len, packet_count,
byte_count, match)
class NXTSetPacketInFormat(NiciraHeader):
def __init__(self, datapath, packet_in_format):
super(NXTSetPacketInFormat, self).__init__(
datapath, ofproto.NXT_SET_PACKET_IN_FORMAT)
self.format = packet_in_format
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_SET_PACKET_IN_FORMAT_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE,
self.format)
@NiciraHeader.register_nx_subtype(ofproto.NXT_PACKET_IN)
class NXTPacketIn(NiciraHeader):
def __init__(self, datapath, buffer_id, total_len, reason, table_id,
cookie, match_len, match, frame):
super(NXTPacketIn, self).__init__(
datapath, ofproto.NXT_PACKET_IN)
self.buffer_id = buffer_id
self.total_len = total_len
self.reason = reason
self.table_id = table_id
self.cookie = cookie
self.match_len = match_len
self.match = match
self.frame = frame
@classmethod
def parser(cls, datapath, buf, offset):
(buffer_id, total_len, reason, table_id,
cookie, match_len) = struct.unpack_from(
ofproto.NX_PACKET_IN_PACK_STR, buf, offset)
offset += (ofproto.NX_PACKET_IN_SIZE
- ofproto.NICIRA_HEADER_SIZE)
match = nx_match.NXMatch.parser(buf, offset, match_len)
        offset += (match_len + 7) // 8 * 8
frame = buf[offset:]
if total_len < len(frame):
frame = frame[:total_len]
return cls(datapath, buffer_id, total_len, reason, table_id,
cookie, match_len, match, frame)
class NXTFlowAge(NiciraHeader):
def __init__(self, datapath):
super(NXTFlowAge, self).__init__(
datapath, ofproto.NXT_FLOW_AGE)
def _serialize_body(self):
self.serialize_header()
class NXTSetAsyncConfig(NiciraHeader):
def __init__(self, datapath, packet_in_mask, port_status_mask,
flow_removed_mask):
super(NXTSetAsyncConfig, self).__init__(
datapath, ofproto.NXT_SET_ASYNC_CONFIG)
self.packet_in_mask = packet_in_mask
self.port_status_mask = port_status_mask
self.flow_removed_mask = flow_removed_mask
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_ASYNC_CONFIG_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE,
self.packet_in_mask[0], self.packet_in_mask[1],
self.port_status_mask[0], self.port_status_mask[1],
self.flow_removed_mask[0], self.flow_removed_mask[1])
class NXTSetControllerId(NiciraHeader):
def __init__(self, datapath, controller_id):
super(NXTSetControllerId, self).__init__(
datapath, ofproto.NXT_SET_CONTROLLER_ID)
self.controller_id = controller_id
def _serialize_body(self):
self.serialize_header()
msg_pack_into(ofproto.NX_CONTROLLER_ID_PACK_STR,
self.buf, ofproto.NICIRA_HEADER_SIZE,
self.controller_id)
#
# asymmetric message (datapath -> controller)
# parser only
#
@_register_parser
@_set_msg_type(ofproto.OFPT_FEATURES_REPLY)
class OFPSwitchFeatures(MsgBase):
def __init__(self, datapath, datapath_id=None, n_buffers=None,
n_tables=None, capabilities=None, actions=None, ports=None):
super(OFPSwitchFeatures, self).__init__(datapath)
self.datapath_id = datapath_id
self.n_buffers = n_buffers
self.n_tables = n_tables
self.capabilities = capabilities
self.actions = actions
self.ports = ports
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPSwitchFeatures, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
(msg.datapath_id,
msg.n_buffers,
msg.n_tables,
msg.capabilities,
msg.actions) = struct.unpack_from(
ofproto.OFP_SWITCH_FEATURES_PACK_STR, msg.buf,
ofproto.OFP_HEADER_SIZE)
msg.ports = {}
        n_ports = ((msg_len - ofproto.OFP_SWITCH_FEATURES_SIZE) //
                   ofproto.OFP_PHY_PORT_SIZE)
offset = ofproto.OFP_SWITCH_FEATURES_SIZE
for _i in range(n_ports):
port = OFPPhyPort.parser(msg.buf, offset)
msg.ports[port.port_no] = port
offset += ofproto.OFP_PHY_PORT_SIZE
return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_PORT_STATUS)
class OFPPortStatus(MsgBase):
def __init__(self, datapath, reason=None, desc=None):
super(OFPPortStatus, self).__init__(datapath)
self.reason = reason
self.desc = desc
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPPortStatus, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
msg.reason = struct.unpack_from(
ofproto.OFP_PORT_STATUS_PACK_STR,
msg.buf, ofproto.OFP_HEADER_SIZE)[0]
msg.desc = OFPPhyPort.parser(msg.buf,
ofproto.OFP_PORT_STATUS_DESC_OFFSET)
return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_PACKET_IN)
class OFPPacketIn(MsgBase):
def __init__(self, datapath, buffer_id=None, total_len=None, in_port=None,
reason=None, data=None):
super(OFPPacketIn, self).__init__(datapath)
self.buffer_id = buffer_id
self.total_len = total_len
self.in_port = in_port
self.reason = reason
self.data = data
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPPacketIn, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
(msg.buffer_id,
msg.total_len,
msg.in_port,
msg.reason) = struct.unpack_from(
ofproto.OFP_PACKET_IN_PACK_STR,
msg.buf, ofproto.OFP_HEADER_SIZE)
msg.data = msg.buf[ofproto.OFP_PACKET_IN_SIZE:]
if msg.total_len < len(msg.data):
# discard padding for 8-byte alignment of OFP packet
msg.data = msg.data[:msg.total_len]
return msg
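# Illustrative sketch, not part of the original module: a packet-in handler
# only needs the fields decoded above.  `msg` would arrive from the event
# dispatcher; the unpacking below is hypothetical.
def _example_handle_packet_in(msg):
    datapath = msg.datapath
    in_port = msg.in_port
    frame = msg.data    # alignment padding already stripped by the parser
    return datapath, in_port, frame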
@_register_parser
@_set_msg_type(ofproto.OFPT_GET_CONFIG_REPLY)
class OFPGetConfigReply(MsgBase):
def __init__(self, datapath):
super(OFPGetConfigReply, self).__init__(datapath)
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPGetConfigReply, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
(msg.flags, msg.miss_send_len) = struct.unpack_from(
ofproto.OFP_SWITCH_CONFIG_PACK_STR,
msg.buf, ofproto.OFP_HEADER_SIZE)
return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_BARRIER_REPLY)
class OFPBarrierReply(MsgBase):
def __init__(self, datapath):
super(OFPBarrierReply, self).__init__(datapath)
@_register_parser
@_set_msg_type(ofproto.OFPT_FLOW_REMOVED)
class OFPFlowRemoved(MsgBase):
def __init__(self, datapath):
super(OFPFlowRemoved, self).__init__(datapath)
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPFlowRemoved, cls).parser(datapath, version, msg_type,
msg_len, xid, buf)
msg.match = OFPMatch.parse(msg.buf, ofproto.OFP_HEADER_SIZE)
(msg.cookie,
msg.priority,
msg.reason,
msg.duration_sec,
msg.duration_nsec,
msg.idle_timeout,
msg.packet_count,
msg.byte_count) = struct.unpack_from(
ofproto.OFP_FLOW_REMOVED_PACK_STR0, msg.buf,
ofproto.OFP_HEADER_SIZE + ofproto.OFP_MATCH_SIZE)
return msg
@_register_parser
@_set_msg_type(ofproto.OFPT_QUEUE_GET_CONFIG_REPLY)
class OFPQueueGetConfigReply(MsgBase):
def __init__(self, datapath):
super(OFPQueueGetConfigReply, self).__init__(datapath)
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPQueueGetConfigReply, cls).parser(
datapath, version, msg_type, msg_len, xid, buf)
offset = ofproto.OFP_HEADER_SIZE
(msg.port,) = struct.unpack_from(
ofproto.OFP_QUEUE_GET_CONFIG_REPLY_PACK_STR, msg.buf, offset)
msg.queues = []
offset = ofproto.OFP_QUEUE_GET_CONFIG_REPLY_SIZE
while offset + ofproto.OFP_PACKET_QUEUE_SIZE <= msg_len:
queue = OFPPacketQueue.parser(msg.buf, offset)
msg.queues.append(queue)
offset += queue.len
return msg
def _set_stats_type(stats_type, stats_body_cls):
def _set_cls_stats_type(cls):
cls.cls_stats_type = stats_type
cls.cls_stats_body_cls = stats_body_cls
return cls
return _set_cls_stats_type
@_register_parser
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPStatsReply(MsgBase):
_STATS_MSG_TYPES = {}
@staticmethod
def register_stats_type(body_single_struct=False):
def _register_stats_type(cls):
assert cls.cls_stats_type is not None
assert cls.cls_stats_type not in OFPStatsReply._STATS_MSG_TYPES
assert cls.cls_stats_body_cls is not None
cls.cls_body_single_struct = body_single_struct
OFPStatsReply._STATS_MSG_TYPES[cls.cls_stats_type] = cls
return cls
return _register_stats_type
def __init__(self, datapath):
super(OFPStatsReply, self).__init__(datapath)
self.type = None
self.flags = None
self.body = None
@classmethod
def parser_stats_body(cls, buf, msg_len, offset):
body_cls = cls.cls_stats_body_cls
body = []
while offset < msg_len:
entry = body_cls.parser(buf, offset)
body.append(entry)
offset += entry.length
if cls.cls_body_single_struct:
return body[0]
return body
@classmethod
def parser_stats(cls, datapath, version, msg_type, msg_len, xid, buf):
# call MsgBase::parser, not OFPStatsReply::parser
msg = MsgBase.parser.__func__(
cls, datapath, version, msg_type, msg_len, xid, buf)
msg.body = msg.parser_stats_body(msg.buf, msg.msg_len,
ofproto.OFP_STATS_MSG_SIZE)
return msg
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
type_, flags = struct.unpack_from(ofproto.OFP_STATS_MSG_PACK_STR,
buffer(buf),
ofproto.OFP_HEADER_SIZE)
stats_type_cls = cls._STATS_MSG_TYPES.get(type_)
msg = stats_type_cls.parser_stats(
datapath, version, msg_type, msg_len, xid, buf)
msg.type = type_
msg.flags = flags
return msg
@OFPStatsReply.register_stats_type(body_single_struct=True)
@_set_stats_type(ofproto.OFPST_DESC, OFPDescStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPDescStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPDescStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_FLOW, OFPFlowStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPFlowStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPFlowStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_AGGREGATE, OFPAggregateStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPAggregateStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPAggregateStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_TABLE, OFPTableStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPTableStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPTableStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_PORT, OFPPortStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPPortStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPPortStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_QUEUE, OFPQueueStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPQueueStatsReply(OFPStatsReply):
def __init__(self, datapath):
super(OFPQueueStatsReply, self).__init__(datapath)
@OFPStatsReply.register_stats_type()
@_set_stats_type(ofproto.OFPST_VENDOR, OFPVendorStats)
@_set_msg_type(ofproto.OFPT_STATS_REPLY)
class OFPVendorStatsReply(OFPStatsReply):
_STATS_VENDORS = {}
@staticmethod
def register_stats_vendor(vendor):
def _register_stats_vendor(cls):
cls.cls_vendor = vendor
OFPVendorStatsReply._STATS_VENDORS[cls.cls_vendor] = cls
return cls
return _register_stats_vendor
def __init__(self, datapath):
super(OFPVendorStatsReply, self).__init__(datapath)
@classmethod
def parser_stats(cls, datapath, version, msg_type, msg_len, xid,
buf):
(type_,) = struct.unpack_from(
ofproto.OFP_VENDOR_STATS_MSG_PACK_STR, buffer(buf),
ofproto.OFP_STATS_MSG_SIZE)
cls_ = cls._STATS_VENDORS.get(type_)
if cls_ is None:
msg = MsgBase.parser.__func__(
cls, datapath, version, msg_type, msg_len, xid, buf)
body_cls = cls.cls_stats_body_cls
body = body_cls.parser(buf,
ofproto.OFP_STATS_MSG_SIZE)
msg.body = body
return msg
return cls_.parser(
datapath, version, msg_type, msg_len, xid, buf,
ofproto.OFP_VENDOR_STATS_MSG_SIZE)
@OFPVendorStatsReply.register_stats_vendor(ofproto.NX_VENDOR_ID)
class NXStatsReply(OFPStatsReply):
_NX_STATS_TYPES = {}
@staticmethod
def register_nx_stats_type(body_single_struct=False):
def _register_nx_stats_type(cls):
assert cls.cls_stats_type is not None
assert cls.cls_stats_type not in \
NXStatsReply._NX_STATS_TYPES
assert cls.cls_stats_body_cls is not None
cls.cls_body_single_struct = body_single_struct
NXStatsReply._NX_STATS_TYPES[cls.cls_stats_type] = cls
return cls
return _register_nx_stats_type
@classmethod
def parser_stats_body(cls, buf, msg_len, offset):
body_cls = cls.cls_stats_body_cls
body = []
while offset < msg_len:
entry = body_cls.parser(buf, offset)
body.append(entry)
offset += entry.length
if cls.cls_body_single_struct:
return body[0]
return body
@classmethod
def parser_stats(cls, datapath, version, msg_type, msg_len, xid,
buf, offset):
msg = MsgBase.parser.__func__(
cls, datapath, version, msg_type, msg_len, xid, buf)
msg.body = msg.parser_stats_body(msg.buf, msg.msg_len, offset)
return msg
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf,
offset):
(type_,) = struct.unpack_from(
ofproto.NX_STATS_MSG_PACK_STR, buffer(buf), offset)
offset += ofproto.NX_STATS_MSG0_SIZE
cls_ = cls._NX_STATS_TYPES.get(type_)
msg = cls_.parser_stats(
datapath, version, msg_type, msg_len, xid, buf, offset)
return msg
@NXStatsReply.register_nx_stats_type()
@_set_stats_type(ofproto.NXST_FLOW, NXFlowStats)
class NXFlowStatsReply(NXStatsReply):
def __init__(self, datapath):
super(NXFlowStatsReply, self).__init__(datapath)
@NXStatsReply.register_nx_stats_type()
@_set_stats_type(ofproto.NXST_AGGREGATE, NXAggregateStats)
class NXAggregateStatsReply(NXStatsReply):
def __init__(self, datapath):
super(NXAggregateStatsReply, self).__init__(datapath)
#
# controller-to-switch message
# serializer only
#
@_set_msg_reply(OFPSwitchFeatures)
@_set_msg_type(ofproto.OFPT_FEATURES_REQUEST)
class OFPFeaturesRequest(MsgBase):
def __init__(self, datapath):
super(OFPFeaturesRequest, self).__init__(datapath)
@_set_msg_type(ofproto.OFPT_GET_CONFIG_REQUEST)
class OFPGetConfigRequest(MsgBase):
def __init__(self, datapath):
super(OFPGetConfigRequest, self).__init__(datapath)
@_set_msg_type(ofproto.OFPT_SET_CONFIG)
class OFPSetConfig(MsgBase):
def __init__(self, datapath, flags=None, miss_send_len=None):
super(OFPSetConfig, self).__init__(datapath)
self.flags = flags
self.miss_send_len = miss_send_len
def _serialize_body(self):
assert self.flags is not None
assert self.miss_send_len is not None
msg_pack_into(ofproto.OFP_SWITCH_CONFIG_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE,
self.flags, self.miss_send_len)
@_set_msg_type(ofproto.OFPT_PACKET_OUT)
class OFPPacketOut(MsgBase):
def __init__(self, datapath, buffer_id=None, in_port=None, actions=None,
data=None):
super(OFPPacketOut, self).__init__(datapath)
self.buffer_id = buffer_id
self.in_port = in_port
self._actions_len = None
self.actions = actions
self.data = data
def _serialize_body(self):
assert self.buffer_id is not None
assert self.in_port is not None
assert self.actions is not None
self._actions_len = 0
offset = ofproto.OFP_PACKET_OUT_SIZE
for a in self.actions:
a.serialize(self.buf, offset)
offset += a.len
self._actions_len += a.len
if self.data is not None:
assert self.buffer_id == 0xffffffff
self.buf += self.data
msg_pack_into(ofproto.OFP_PACKET_OUT_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE,
self.buffer_id, self.in_port, self._actions_len)
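# Illustrative sketch, not part of the original module: flooding a raw frame
# with OFPPacketOut.  A buffer_id of 0xffffffff means the frame travels in
# `data`; `datapath` and `frame` are hypothetical, and OFPActionOutput is
# assumed to be defined earlier in this file.
def _example_flood(datapath, frame):
    out = OFPPacketOut(datapath, buffer_id=0xffffffff,
                       in_port=ofproto.OFPP_NONE,
                       actions=[OFPActionOutput(ofproto.OFPP_FLOOD)],
                       data=frame)
    datapath.send_msg(out)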
@_set_msg_type(ofproto.OFPT_FLOW_MOD)
class OFPFlowMod(MsgBase):
def __init__(self, datapath, match, cookie, command,
idle_timeout=0, hard_timeout=0,
priority=ofproto.OFP_DEFAULT_PRIORITY,
buffer_id=0xffffffff, out_port=ofproto.OFPP_NONE,
flags=0, actions=None):
if actions is None:
actions = []
super(OFPFlowMod, self).__init__(datapath)
self.match = match
self.cookie = cookie
self.command = command
self.idle_timeout = idle_timeout
self.hard_timeout = hard_timeout
self.priority = priority
self.buffer_id = buffer_id
self.out_port = out_port
self.flags = flags
self.actions = actions
def _serialize_body(self):
offset = ofproto.OFP_HEADER_SIZE
self.match.serialize(self.buf, offset)
offset += ofproto.OFP_MATCH_SIZE
msg_pack_into(ofproto.OFP_FLOW_MOD_PACK_STR0, self.buf, offset,
self.cookie, self.command,
self.idle_timeout, self.hard_timeout,
self.priority, self.buffer_id, self.out_port,
self.flags)
offset = ofproto.OFP_FLOW_MOD_SIZE
if self.actions is not None:
for a in self.actions:
a.serialize(self.buf, offset)
offset += a.len
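# Illustrative sketch, not part of the original module: installing a flow
# that forwards traffic from port 1 out of port 2.  `datapath` is
# hypothetical; OFPMatch and OFPActionOutput are assumed to be defined
# earlier in this file, and the zeros stand for wildcarded match fields.
def _example_add_flow(datapath):
    wildcards = ofproto.OFPFW_ALL & ~ofproto.OFPFW_IN_PORT
    match = OFPMatch(wildcards, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
    mod = OFPFlowMod(datapath, match=match, cookie=0,
                     command=ofproto.OFPFC_ADD,
                     actions=[OFPActionOutput(2)])
    datapath.send_msg(mod)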
@_set_msg_type(ofproto.OFPT_PORT_MOD)
class OFPPortMod(MsgBase):
_TYPE = {
'ascii': [
'hw_addr',
]
}
def __init__(self, datapath, port_no=0, hw_addr='00:00:00:00:00:00',
config=0, mask=0, advertise=0):
super(OFPPortMod, self).__init__(datapath)
self.port_no = port_no
self.hw_addr = hw_addr
self.config = config
self.mask = mask
self.advertise = advertise
def _serialize_body(self):
msg_pack_into(ofproto.OFP_PORT_MOD_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE,
self.port_no, addrconv.mac.text_to_bin(self.hw_addr),
self.config, self.mask, self.advertise)
@_set_msg_reply(OFPBarrierReply)
@_set_msg_type(ofproto.OFPT_BARRIER_REQUEST)
class OFPBarrierRequest(MsgBase):
def __init__(self, datapath):
super(OFPBarrierRequest, self).__init__(datapath)
@_set_msg_reply(OFPQueueGetConfigReply)
@_set_msg_type(ofproto.OFPT_QUEUE_GET_CONFIG_REQUEST)
class OFPQueueGetConfigRequest(MsgBase):
def __init__(self, datapath, port):
super(OFPQueueGetConfigRequest, self).__init__(datapath)
self.port = port
def _serialize_body(self):
msg_pack_into(ofproto.OFP_QUEUE_GET_CONFIG_REQUEST_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE, self.port)
class OFPStatsRequest(MsgBase):
def __init__(self, datapath, flags):
assert flags == 0 # none yet defined
super(OFPStatsRequest, self).__init__(datapath)
self.type = self.__class__.cls_stats_type
self.flags = flags
def _serialize_stats_body(self):
pass
def _serialize_body(self):
msg_pack_into(ofproto.OFP_STATS_MSG_PACK_STR,
self.buf, ofproto.OFP_HEADER_SIZE,
self.type, self.flags)
self._serialize_stats_body()
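# Illustrative sketch, not part of the original module: OFPStatsRequest is a
# template method -- _serialize_body writes the common stats header and each
# subclass appends its payload by overriding _serialize_stats_body.  Issuing
# any stats request is therefore uniform:
#
#   req = OFPDescStatsRequest(datapath, 0)
#   datapath.send_msg(req)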
@_set_msg_reply(OFPDescStatsReply)
@_set_stats_type(ofproto.OFPST_DESC, OFPDescStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPDescStatsRequest(OFPStatsRequest):
def __init__(self, datapath, flags):
super(OFPDescStatsRequest, self).__init__(datapath, flags)
class OFPFlowStatsRequestBase(OFPStatsRequest):
def __init__(self, datapath, flags, match, table_id, out_port):
super(OFPFlowStatsRequestBase, self).__init__(datapath, flags)
self.match = match
self.table_id = table_id
self.out_port = out_port
def _serialize_stats_body(self):
offset = ofproto.OFP_STATS_MSG_SIZE
self.match.serialize(self.buf, offset)
offset += ofproto.OFP_MATCH_SIZE
msg_pack_into(ofproto.OFP_FLOW_STATS_REQUEST_ID_PORT_STR,
self.buf, offset, self.table_id, self.out_port)
@_set_msg_reply(OFPFlowStatsReply)
@_set_stats_type(ofproto.OFPST_FLOW, OFPFlowStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPFlowStatsRequest(OFPFlowStatsRequestBase):
def __init__(self, datapath, flags, match, table_id, out_port):
super(OFPFlowStatsRequest, self).__init__(
datapath, flags, match, table_id, out_port)
@_set_msg_reply(OFPAggregateStatsReply)
@_set_stats_type(ofproto.OFPST_AGGREGATE, OFPAggregateStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPAggregateStatsRequest(OFPFlowStatsRequestBase):
def __init__(self, datapath, flags, match, table_id, out_port):
super(OFPAggregateStatsRequest, self).__init__(
datapath, flags, match, table_id, out_port)
@_set_msg_reply(OFPTableStatsReply)
@_set_stats_type(ofproto.OFPST_TABLE, OFPTableStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPTableStatsRequest(OFPStatsRequest):
def __init__(self, datapath, flags):
super(OFPTableStatsRequest, self).__init__(datapath, flags)
@_set_msg_reply(OFPPortStatsReply)
@_set_stats_type(ofproto.OFPST_PORT, OFPPortStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPPortStatsRequest(OFPStatsRequest):
def __init__(self, datapath, flags, port_no):
super(OFPPortStatsRequest, self).__init__(datapath, flags)
self.port_no = port_no
def _serialize_stats_body(self):
msg_pack_into(ofproto.OFP_PORT_STATS_REQUEST_PACK_STR,
self.buf, ofproto.OFP_STATS_MSG_SIZE, self.port_no)
@_set_msg_reply(OFPQueueStatsReply)
@_set_stats_type(ofproto.OFPST_QUEUE, OFPQueueStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPQueueStatsRequest(OFPStatsRequest):
def __init__(self, datapath, flags, port_no, queue_id):
super(OFPQueueStatsRequest, self).__init__(datapath, flags)
self.port_no = port_no
self.queue_id = queue_id
def _serialize_stats_body(self):
msg_pack_into(ofproto.OFP_QUEUE_STATS_REQUEST_PACK_STR,
self.buf, ofproto.OFP_STATS_MSG_SIZE,
self.port_no, self.queue_id)
@_set_msg_reply(OFPVendorStatsReply)
@_set_stats_type(ofproto.OFPST_VENDOR, OFPVendorStats)
@_set_msg_type(ofproto.OFPT_STATS_REQUEST)
class OFPVendorStatsRequest(OFPStatsRequest):
def __init__(self, datapath, flags, vendor, specific_data=None):
super(OFPVendorStatsRequest, self).__init__(datapath, flags)
self.vendor = vendor
self.specific_data = specific_data
def _serialize_vendor_stats(self):
self.buf += self.specific_data
def _serialize_stats_body(self):
msg_pack_into(ofproto.OFP_VENDOR_STATS_MSG_PACK_STR,
self.buf, ofproto.OFP_STATS_MSG_SIZE,
self.vendor)
self._serialize_vendor_stats()
class NXStatsRequest(OFPVendorStatsRequest):
def __init__(self, datapath, flags, subtype):
super(NXStatsRequest, self).__init__(datapath, flags,
ofproto.NX_VENDOR_ID)
self.subtype = subtype
def _serialize_vendor_stats_body(self):
pass
def _serialize_vendor_stats(self):
msg_pack_into(ofproto.NX_STATS_MSG_PACK_STR, self.buf,
ofproto.OFP_VENDOR_STATS_MSG_SIZE,
self.subtype)
self._serialize_vendor_stats_body()
class NXFlowStatsRequest(NXStatsRequest):
def __init__(self, datapath, flags, out_port, table_id, rule=None):
super(NXFlowStatsRequest, self).__init__(datapath, flags,
ofproto.NXST_FLOW)
self.out_port = out_port
self.table_id = table_id
self.rule = rule
self.match_len = 0
def _serialize_vendor_stats_body(self):
if self.rule is not None:
offset = ofproto.NX_STATS_MSG_SIZE + \
ofproto.NX_FLOW_STATS_REQUEST_SIZE
self.match_len = nx_match.serialize_nxm_match(
self.rule, self.buf, offset)
msg_pack_into(
ofproto.NX_FLOW_STATS_REQUEST_PACK_STR,
self.buf, ofproto.NX_STATS_MSG_SIZE, self.out_port,
self.match_len, self.table_id)
class NXAggregateStatsRequest(NXStatsRequest):
def __init__(self, datapath, flags, out_port, table_id, rule=None):
super(NXAggregateStatsRequest, self).__init__(
datapath, flags, ofproto.NXST_AGGREGATE)
self.out_port = out_port
self.table_id = table_id
self.rule = rule
self.match_len = 0
def _serialize_vendor_stats_body(self):
if self.rule is not None:
offset = ofproto.NX_STATS_MSG_SIZE + \
ofproto.NX_AGGREGATE_STATS_REQUEST_SIZE
self.match_len = nx_match.serialize_nxm_match(
self.rule, self.buf, offset)
msg_pack_into(
ofproto.NX_AGGREGATE_STATS_REQUEST_PACK_STR,
self.buf, ofproto.NX_STATS_MSG_SIZE, self.out_port,
self.match_len, self.table_id)
| apache-2.0 | 6,000,006,826,050,707,000 | -4,686,503,290,360,776,000 | 33.568105 | 79 | 0.601867 | false |
c0hen/django-venv | lib/python3.4/site-packages/pip/utils/build.py | 899 | 1312 | from __future__ import absolute_import
import os.path
import tempfile
from pip.utils import rmtree
class BuildDirectory(object):
def __init__(self, name=None, delete=None):
# If we were not given an explicit directory, and we were not given an
# explicit delete option, then we'll default to deleting.
if name is None and delete is None:
delete = True
if name is None:
# We realpath here because some systems have their default tmpdir
# symlinked to another directory. This tends to confuse build
# scripts, so we canonicalize the path by traversing potential
# symlinks here.
name = os.path.realpath(tempfile.mkdtemp(prefix="pip-build-"))
self.name = name
self.delete = delete
def __repr__(self):
return "<{} {!r}>".format(self.__class__.__name__, self.name)
def __enter__(self):
return self.name
def __exit__(self, exc, value, tb):
self.cleanup()
def cleanup(self):
if self.delete:
rmtree(self.name)
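# Illustrative sketch, not part of the original module: BuildDirectory is a
# context manager, so typical usage is
#
#   with BuildDirectory() as build_dir:
#       ...    # unpack and build inside build_dir
#
# where the temporary tree is removed on exit because delete defaulted to
# True.  Passing an explicit name (a hypothetical path below) keeps it:
#
#   with BuildDirectory(name="/tmp/my-build"):
#       ...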
| gpl-3.0 | 1,884,896,048,904,171,800 | -2,051,273,585,512,437,800 | 30.238095 | 79 | 0.602134 | false |
wkentaro/docopt | examples/options_example.py | 18 | 1597 | """Example of program with many options using docopt.
Usage:
options_example.py [-hvqrf NAME] [--exclude=PATTERNS]
[--select=ERRORS | --ignore=ERRORS] [--show-source]
[--statistics] [--count] [--benchmark] PATH...
options_example.py (--doctest | --testsuite=DIR)
options_example.py --version
Arguments:
PATH destination path
Options:
-h --help show this help message and exit
--version show version and exit
-v --verbose print status messages
-q --quiet report only file names
-r --repeat show all occurrences of the same error
--exclude=PATTERNS exclude files or directories which match these comma
separated patterns [default: .svn,CVS,.bzr,.hg,.git]
-f NAME --file=NAME when parsing directories, only check filenames matching
these comma separated patterns [default: *.py]
--select=ERRORS select errors and warnings (e.g. E,W6)
--ignore=ERRORS skip errors and warnings (e.g. E4,W)
--show-source show source code for each error
--statistics count errors and warnings
--count print total number of errors and warnings to standard
error and set exit code to 1 if total is not null
--benchmark measure processing speed
--testsuite=DIR run regression tests from dir
--doctest run doctest on myself
"""
from docopt import docopt
if __name__ == '__main__':
arguments = docopt(__doc__, version='1.0.0rc2')
print(arguments)
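# Illustrative sketch, not part of the original example: a hypothetical
# invocation and a fragment of the dict docopt returns.
#
#   $ python options_example.py -v --count src/
#   {'--count': True,
#    '--verbose': True,
#    'PATH': ['src/'],
#    ...}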
| mit | 1,248,434,430,989,388,000 | -7,450,143,275,215,263,000 | 39.948718 | 78 | 0.61866 | false |
bingosummer/azure-linux-extensions | DSC/azure/__init__.py | 46 | 33598 | #-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import ast
import base64
import hashlib
import hmac
import sys
import types
import warnings
import inspect
if sys.version_info < (3,):
    from urllib2 import quote as url_quote
    from urllib2 import unquote as url_unquote
    _strtype = basestring
    _unicode_type = unicode
else:
    from urllib.parse import quote as url_quote
    from urllib.parse import unquote as url_unquote
    _strtype = str
    _unicode_type = str
from datetime import datetime
from xml.dom import minidom
from xml.sax.saxutils import escape as xml_escape
#--------------------------------------------------------------------------
# constants
__author__ = 'Microsoft Corp. <ptvshelp@microsoft.com>'
__version__ = '0.8.4'
# Live ServiceClient URLs
BLOB_SERVICE_HOST_BASE = '.blob.core.windows.net'
QUEUE_SERVICE_HOST_BASE = '.queue.core.windows.net'
TABLE_SERVICE_HOST_BASE = '.table.core.windows.net'
SERVICE_BUS_HOST_BASE = '.servicebus.windows.net'
MANAGEMENT_HOST = 'management.core.windows.net'
# Development ServiceClient URLs
DEV_BLOB_HOST = '127.0.0.1:10000'
DEV_QUEUE_HOST = '127.0.0.1:10001'
DEV_TABLE_HOST = '127.0.0.1:10002'
# Default credentials for Development Storage Service
DEV_ACCOUNT_NAME = 'devstoreaccount1'
DEV_ACCOUNT_KEY = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=='
# All of our error messages
_ERROR_CANNOT_FIND_PARTITION_KEY = 'Cannot find partition key in request.'
_ERROR_CANNOT_FIND_ROW_KEY = 'Cannot find row key in request.'
_ERROR_INCORRECT_TABLE_IN_BATCH = \
'Table should be the same in a batch operations'
_ERROR_INCORRECT_PARTITION_KEY_IN_BATCH = \
'Partition Key should be the same in a batch operations'
_ERROR_DUPLICATE_ROW_KEY_IN_BATCH = \
'Row Keys should not be the same in a batch operations'
_ERROR_BATCH_COMMIT_FAIL = 'Batch Commit Fail'
_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE = \
'Message is not peek locked and cannot be deleted.'
_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK = \
'Message is not peek locked and cannot be unlocked.'
_ERROR_QUEUE_NOT_FOUND = 'Queue was not found'
_ERROR_TOPIC_NOT_FOUND = 'Topic was not found'
_ERROR_CONFLICT = 'Conflict ({0})'
_ERROR_NOT_FOUND = 'Not found ({0})'
_ERROR_UNKNOWN = 'Unknown error ({0})'
_ERROR_SERVICEBUS_MISSING_INFO = \
'You need to provide servicebus namespace, access key and Issuer'
_ERROR_STORAGE_MISSING_INFO = \
'You need to provide both account name and access key'
_ERROR_ACCESS_POLICY = \
'share_access_policy must be either SignedIdentifier or AccessPolicy ' + \
'instance'
_WARNING_VALUE_SHOULD_BE_BYTES = \
'Warning: {0} must be bytes data type. It will be converted ' + \
'automatically, with utf-8 text encoding.'
_ERROR_VALUE_SHOULD_BE_BYTES = '{0} should be of type bytes.'
_ERROR_VALUE_NONE = '{0} should not be None.'
_ERROR_VALUE_NEGATIVE = '{0} should not be negative.'
_ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY = \
'Cannot serialize the specified value ({0}) to an entity. Please use ' + \
'an EntityProperty (which can specify custom types), int, str, bool, ' + \
'or datetime.'
_ERROR_PAGE_BLOB_SIZE_ALIGNMENT = \
'Invalid page blob size: {0}. ' + \
'The size must be aligned to a 512-byte boundary.'
_USER_AGENT_STRING = 'pyazure/' + __version__
METADATA_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata'
class WindowsAzureData(object):
    ''' Base class for data classes.
    It is only used for isinstance checks. '''
pass
class WindowsAzureError(Exception):
    ''' Windows Azure exception base class. '''
def __init__(self, message):
super(WindowsAzureError, self).__init__(message)
class WindowsAzureConflictError(WindowsAzureError):
'''Indicates that the resource could not be created because it already
exists'''
def __init__(self, message):
super(WindowsAzureConflictError, self).__init__(message)
class WindowsAzureMissingResourceError(WindowsAzureError):
    '''Indicates that a request for a resource (queue, table,
    container, etc...) failed because the specified resource does not exist'''
def __init__(self, message):
super(WindowsAzureMissingResourceError, self).__init__(message)
class WindowsAzureBatchOperationError(WindowsAzureError):
'''Indicates that a batch operation failed'''
def __init__(self, message, code):
super(WindowsAzureBatchOperationError, self).__init__(message)
self.code = code
class Feed(object):
pass
class _Base64String(str):
pass
class HeaderDict(dict):
def __getitem__(self, index):
return super(HeaderDict, self).__getitem__(index.lower())
def _encode_base64(data):
if isinstance(data, _unicode_type):
data = data.encode('utf-8')
encoded = base64.b64encode(data)
return encoded.decode('utf-8')
def _decode_base64_to_bytes(data):
if isinstance(data, _unicode_type):
data = data.encode('utf-8')
return base64.b64decode(data)
def _decode_base64_to_text(data):
decoded_bytes = _decode_base64_to_bytes(data)
return decoded_bytes.decode('utf-8')
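# Illustrative sketch, not part of the original module: the helpers above
# give a text-safe round trip for binary payloads.
def _example_base64_roundtrip():
    encoded = _encode_base64(b'binary \x00 payload')
    assert _decode_base64_to_bytes(encoded) == b'binary \x00 payload'
    return _decode_base64_to_text(_encode_base64('text payload'))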
def _get_readable_id(id_name, id_prefix_to_skip):
"""simplified an id to be more friendly for us people"""
# id_name is in the form 'https://namespace.host.suffix/name'
# where name may contain a forward slash!
pos = id_name.find('//')
if pos != -1:
pos += 2
if id_prefix_to_skip:
pos = id_name.find(id_prefix_to_skip, pos)
if pos != -1:
pos += len(id_prefix_to_skip)
pos = id_name.find('/', pos)
if pos != -1:
return id_name[pos + 1:]
return id_name
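# Illustrative sketch of the simplification above (the namespace and entity
# names are assumed, not from the original code):
# >>> _get_readable_id('https://myns.servicebus.windows.net/myqueue', None)
# 'myqueue'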
def _get_entry_properties_from_node(entry, include_id, id_prefix_to_skip=None, use_title_as_id=False):
''' get properties from entry xml '''
properties = {}
etag = entry.getAttributeNS(METADATA_NS, 'etag')
if etag:
properties['etag'] = etag
for updated in _get_child_nodes(entry, 'updated'):
properties['updated'] = updated.firstChild.nodeValue
for name in _get_children_from_path(entry, 'author', 'name'):
if name.firstChild is not None:
properties['author'] = name.firstChild.nodeValue
if include_id:
if use_title_as_id:
for title in _get_child_nodes(entry, 'title'):
properties['name'] = title.firstChild.nodeValue
else:
for id in _get_child_nodes(entry, 'id'):
properties['name'] = _get_readable_id(
id.firstChild.nodeValue, id_prefix_to_skip)
return properties
def _get_entry_properties(xmlstr, include_id, id_prefix_to_skip=None):
''' get properties from entry xml '''
xmldoc = minidom.parseString(xmlstr)
properties = {}
for entry in _get_child_nodes(xmldoc, 'entry'):
properties.update(_get_entry_properties_from_node(entry, include_id, id_prefix_to_skip))
return properties
def _get_first_child_node_value(parent_node, node_name):
xml_attrs = _get_child_nodes(parent_node, node_name)
if xml_attrs:
xml_attr = xml_attrs[0]
if xml_attr.firstChild:
value = xml_attr.firstChild.nodeValue
return value
def _get_child_nodes(node, tagName):
return [childNode for childNode in node.getElementsByTagName(tagName)
if childNode.parentNode == node]
def _get_children_from_path(node, *path):
    '''Descends through a hierarchy of nodes, returning the list of children
    at the innermost level. Only returns children who share a common parent,
not cousins.'''
cur = node
for index, child in enumerate(path):
if isinstance(child, _strtype):
next = _get_child_nodes(cur, child)
else:
next = _get_child_nodesNS(cur, *child)
if index == len(path) - 1:
return next
elif not next:
break
cur = next[0]
return []
def _get_child_nodesNS(node, ns, tagName):
return [childNode for childNode in node.getElementsByTagNameNS(ns, tagName)
if childNode.parentNode == node]
def _create_entry(entry_body):
''' Adds common part of entry to a given entry body and return the whole
xml. '''
updated_str = datetime.utcnow().isoformat()
if datetime.utcnow().utcoffset() is None:
updated_str += '+00:00'
entry_start = '''<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom" >
<title /><updated>{updated}</updated><author><name /></author><id />
<content type="application/xml">
{body}</content></entry>'''
return entry_start.format(updated=updated_str, body=entry_body)
def _to_datetime(strtime):
return datetime.strptime(strtime, "%Y-%m-%dT%H:%M:%S.%f")
_KNOWN_SERIALIZATION_XFORMS = {
'include_apis': 'IncludeAPIs',
'message_id': 'MessageId',
'content_md5': 'Content-MD5',
'last_modified': 'Last-Modified',
'cache_control': 'Cache-Control',
'account_admin_live_email_id': 'AccountAdminLiveEmailId',
'service_admin_live_email_id': 'ServiceAdminLiveEmailId',
'subscription_id': 'SubscriptionID',
'fqdn': 'FQDN',
'private_id': 'PrivateID',
'os_virtual_hard_disk': 'OSVirtualHardDisk',
'logical_disk_size_in_gb': 'LogicalDiskSizeInGB',
'logical_size_in_gb': 'LogicalSizeInGB',
'os': 'OS',
'persistent_vm_downtime_info': 'PersistentVMDowntimeInfo',
'copy_id': 'CopyId',
}
def _get_serialization_name(element_name):
"""converts a Python name into a serializable name"""
known = _KNOWN_SERIALIZATION_XFORMS.get(element_name)
if known is not None:
return known
if element_name.startswith('x_ms_'):
return element_name.replace('_', '-')
if element_name.endswith('_id'):
element_name = element_name.replace('_id', 'ID')
for name in ['content_', 'last_modified', 'if_', 'cache_control']:
if element_name.startswith(name):
element_name = element_name.replace('_', '-_')
return ''.join(name.capitalize() for name in element_name.split('_'))
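# Hedged sketch of the transforms above (sample inputs are assumed):
# _get_serialization_name('message_id') -> 'MessageId' (known xform)
# _get_serialization_name('x_ms_blob_type') -> 'x-ms-blob-type'
# _get_serialization_name('content_type') -> 'Content-Type' (via the '-_' trick)
# _get_serialization_name('container_name') -> 'ContainerName'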
if sys.version_info < (3,):
_unicode_type = unicode
def _str(value):
if isinstance(value, unicode):
return value.encode('utf-8')
return str(value)
else:
_str = str
_unicode_type = str
def _str_or_none(value):
if value is None:
return None
return _str(value)
def _int_or_none(value):
if value is None:
return None
return str(int(value))
def _bool_or_none(value):
if value is None:
return None
if isinstance(value, bool):
if value:
return 'true'
else:
return 'false'
return str(value)
def _convert_class_to_xml(source, xml_prefix=True):
if source is None:
return ''
xmlstr = ''
if xml_prefix:
xmlstr = '<?xml version="1.0" encoding="utf-8"?>'
if isinstance(source, list):
for value in source:
xmlstr += _convert_class_to_xml(value, False)
elif isinstance(source, WindowsAzureData):
class_name = source.__class__.__name__
xmlstr += '<' + class_name + '>'
for name, value in vars(source).items():
if value is not None:
if isinstance(value, list) or \
isinstance(value, WindowsAzureData):
xmlstr += _convert_class_to_xml(value, False)
else:
xmlstr += ('<' + _get_serialization_name(name) + '>' +
xml_escape(str(value)) + '</' +
_get_serialization_name(name) + '>')
xmlstr += '</' + class_name + '>'
return xmlstr
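# Hypothetical sketch of the serializer above ('Queue' and 'queue_name' are
# assumed example names, not part of the original code):
#   class Queue(WindowsAzureData):
#       def __init__(self):
#           self.queue_name = 'q1'
# _convert_class_to_xml(Queue(), xml_prefix=False) would produce
# '<Queue><QueueName>q1</QueueName></Queue>'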
def _find_namespaces_from_child(parent, child, namespaces):
"""Recursively searches from the parent to the child,
gathering all the applicable namespaces along the way"""
for cur_child in parent.childNodes:
if cur_child is child:
return True
if _find_namespaces_from_child(cur_child, child, namespaces):
# we are the parent node
for key in cur_child.attributes.keys():
if key.startswith('xmlns:') or key == 'xmlns':
namespaces[key] = cur_child.attributes[key]
break
return False
def _find_namespaces(parent, child):
res = {}
for key in parent.documentElement.attributes.keys():
if key.startswith('xmlns:') or key == 'xmlns':
res[key] = parent.documentElement.attributes[key]
_find_namespaces_from_child(parent, child, res)
return res
def _clone_node_with_namespaces(node_to_clone, original_doc):
clone = node_to_clone.cloneNode(True)
for key, value in _find_namespaces(original_doc, node_to_clone).items():
clone.attributes[key] = value
return clone
def _convert_response_to_feeds(response, convert_callback):
if response is None:
return None
feeds = _list_of(Feed)
x_ms_continuation = HeaderDict()
for name, value in response.headers:
if 'x-ms-continuation' in name:
x_ms_continuation[name[len('x-ms-continuation') + 1:]] = value
if x_ms_continuation:
setattr(feeds, 'x_ms_continuation', x_ms_continuation)
xmldoc = minidom.parseString(response.body)
xml_entries = _get_children_from_path(xmldoc, 'feed', 'entry')
if not xml_entries:
# in some cases, response contains only entry but no feed
xml_entries = _get_children_from_path(xmldoc, 'entry')
if inspect.isclass(convert_callback) and issubclass(convert_callback, WindowsAzureData):
for xml_entry in xml_entries:
return_obj = convert_callback()
for node in _get_children_from_path(xml_entry,
'content',
convert_callback.__name__):
_fill_data_to_return_object(node, return_obj)
for name, value in _get_entry_properties_from_node(xml_entry,
include_id=True,
use_title_as_id=True).items():
setattr(return_obj, name, value)
feeds.append(return_obj)
else:
for xml_entry in xml_entries:
new_node = _clone_node_with_namespaces(xml_entry, xmldoc)
feeds.append(convert_callback(new_node.toxml('utf-8')))
return feeds
def _validate_type_bytes(param_name, param):
if not isinstance(param, bytes):
raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES.format(param_name))
def _validate_not_none(param_name, param):
if param is None:
raise TypeError(_ERROR_VALUE_NONE.format(param_name))
def _fill_list_of(xmldoc, element_type, xml_element_name):
xmlelements = _get_child_nodes(xmldoc, xml_element_name)
return [_parse_response_body_from_xml_node(xmlelement, element_type) \
for xmlelement in xmlelements]
def _fill_scalar_list_of(xmldoc, element_type, parent_xml_element_name,
xml_element_name):
'''Converts an xml fragment into a list of scalar types. The parent xml
element contains a flat list of xml elements which are converted into the
specified scalar type and added to the list.
Example:
xmldoc=
<Endpoints>
<Endpoint>http://{storage-service-name}.blob.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.queue.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.table.core.windows.net/</Endpoint>
</Endpoints>
element_type=str
parent_xml_element_name='Endpoints'
xml_element_name='Endpoint'
'''
xmlelements = _get_child_nodes(xmldoc, parent_xml_element_name)
if xmlelements:
xmlelements = _get_child_nodes(xmlelements[0], xml_element_name)
return [_get_node_value(xmlelement, element_type) \
for xmlelement in xmlelements]
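# With the docstring example above, a call like
# _fill_scalar_list_of(xmldoc, str, 'Endpoints', 'Endpoint')
# is expected to return the three endpoint URLs as a list of str.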
def _fill_dict(xmldoc, element_name):
xmlelements = _get_child_nodes(xmldoc, element_name)
if xmlelements:
return_obj = {}
for child in xmlelements[0].childNodes:
if child.firstChild:
return_obj[child.nodeName] = child.firstChild.nodeValue
return return_obj
def _fill_dict_of(xmldoc, parent_xml_element_name, pair_xml_element_name,
key_xml_element_name, value_xml_element_name):
'''Converts an xml fragment into a dictionary. The parent xml element
contains a list of xml elements where each element has a child element for
the key, and another for the value.
Example:
xmldoc=
<ExtendedProperties>
<ExtendedProperty>
<Name>Ext1</Name>
<Value>Val1</Value>
</ExtendedProperty>
<ExtendedProperty>
<Name>Ext2</Name>
<Value>Val2</Value>
</ExtendedProperty>
</ExtendedProperties>
element_type=str
parent_xml_element_name='ExtendedProperties'
pair_xml_element_name='ExtendedProperty'
key_xml_element_name='Name'
value_xml_element_name='Value'
'''
return_obj = {}
xmlelements = _get_child_nodes(xmldoc, parent_xml_element_name)
if xmlelements:
xmlelements = _get_child_nodes(xmlelements[0], pair_xml_element_name)
for pair in xmlelements:
keys = _get_child_nodes(pair, key_xml_element_name)
values = _get_child_nodes(pair, value_xml_element_name)
if keys and values:
key = keys[0].firstChild.nodeValue
value = values[0].firstChild.nodeValue
return_obj[key] = value
return return_obj
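# With the docstring example above, the call
# _fill_dict_of(xmldoc, 'ExtendedProperties', 'ExtendedProperty', 'Name', 'Value')
# is expected to return {'Ext1': 'Val1', 'Ext2': 'Val2'}.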
def _fill_instance_child(xmldoc, element_name, return_type):
'''Converts a child of the current dom element to the specified type.
'''
xmlelements = _get_child_nodes(
xmldoc, _get_serialization_name(element_name))
if not xmlelements:
return None
return_obj = return_type()
_fill_data_to_return_object(xmlelements[0], return_obj)
return return_obj
def _fill_instance_element(element, return_type):
"""Converts a DOM element into the specified object"""
return _parse_response_body_from_xml_node(element, return_type)
def _fill_data_minidom(xmldoc, element_name, data_member):
xmlelements = _get_child_nodes(
xmldoc, _get_serialization_name(element_name))
if not xmlelements or not xmlelements[0].childNodes:
return None
value = xmlelements[0].firstChild.nodeValue
if data_member is None:
return value
elif isinstance(data_member, datetime):
return _to_datetime(value)
elif type(data_member) is bool:
return value.lower() != 'false'
else:
return type(data_member)(value)
def _get_node_value(xmlelement, data_type):
value = xmlelement.firstChild.nodeValue
if data_type is datetime:
return _to_datetime(value)
elif data_type is bool:
return value.lower() != 'false'
else:
return data_type(value)
def _get_request_body_bytes_only(param_name, param_value):
'''Validates the request body passed in and converts it to bytes
if our policy allows it.'''
if param_value is None:
return b''
if isinstance(param_value, bytes):
return param_value
# Previous versions of the SDK allowed data types other than bytes to be
# passed in, and they would be auto-converted to bytes. We preserve this
# behavior when running under 2.7, but issue a warning.
# Python 3 support is new, so we reject anything that's not bytes.
if sys.version_info < (3,):
warnings.warn(_WARNING_VALUE_SHOULD_BE_BYTES.format(param_name))
return _get_request_body(param_value)
raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES.format(param_name))
def _get_request_body(request_body):
    '''Converts an object into a request body. If it's None
    we'll return an empty byte string; if it's one of our objects we'll
    convert it to XML and return it. Otherwise we just use the object
    directly'''
if request_body is None:
return b''
if isinstance(request_body, WindowsAzureData):
request_body = _convert_class_to_xml(request_body)
if isinstance(request_body, bytes):
return request_body
if isinstance(request_body, _unicode_type):
return request_body.encode('utf-8')
request_body = str(request_body)
if isinstance(request_body, _unicode_type):
return request_body.encode('utf-8')
return request_body
def _parse_enum_results_list(response, return_type, resp_type, item_type):
"""resp_body is the XML we received
resp_type is a string, such as Containers,
return_type is the type we're constructing, such as ContainerEnumResults
item_type is the type object of the item to be created, such as Container
This function then returns a ContainerEnumResults object with the
containers member populated with the results.
"""
# parsing something like:
# <EnumerationResults ... >
# <Queues>
# <Queue>
# <Something />
# <SomethingElse />
# </Queue>
# </Queues>
# </EnumerationResults>
respbody = response.body
return_obj = return_type()
doc = minidom.parseString(respbody)
items = []
for enum_results in _get_child_nodes(doc, 'EnumerationResults'):
# path is something like Queues, Queue
for child in _get_children_from_path(enum_results,
resp_type,
resp_type[:-1]):
items.append(_fill_instance_element(child, item_type))
for name, value in vars(return_obj).items():
            # queues, Queues: this is the list itself, which we populated
            # above
if name == resp_type.lower():
                # the list itself.
continue
value = _fill_data_minidom(enum_results, name, value)
if value is not None:
setattr(return_obj, name, value)
setattr(return_obj, resp_type.lower(), items)
return return_obj
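# Hypothetical usage sketch (the type names are assumed): parsing the XML
# outlined above with
#   _parse_enum_results_list(response, QueueEnumResults, 'Queues', Queue)
# would set return_obj.queues to a list of Queue instances, one per <Queue>.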
def _parse_simple_list(response, type, item_type, list_name):
respbody = response.body
res = type()
res_items = []
doc = minidom.parseString(respbody)
type_name = type.__name__
item_name = item_type.__name__
for item in _get_children_from_path(doc, type_name, item_name):
res_items.append(_fill_instance_element(item, item_type))
setattr(res, list_name, res_items)
return res
def _parse_response(response, return_type):
'''
Parse the HTTPResponse's body and fill all the data into a class of
return_type.
'''
return _parse_response_body_from_xml_text(response.body, return_type)
def _parse_service_resources_response(response, return_type):
'''
Parse the HTTPResponse's body and fill all the data into a class of
return_type.
'''
return _parse_response_body_from_service_resources_xml_text(response.body, return_type)
def _fill_data_to_return_object(node, return_obj):
members = dict(vars(return_obj))
for name, value in members.items():
if isinstance(value, _list_of):
setattr(return_obj,
name,
_fill_list_of(node,
value.list_type,
value.xml_element_name))
elif isinstance(value, _scalar_list_of):
setattr(return_obj,
name,
_fill_scalar_list_of(node,
value.list_type,
_get_serialization_name(name),
value.xml_element_name))
elif isinstance(value, _dict_of):
setattr(return_obj,
name,
_fill_dict_of(node,
_get_serialization_name(name),
value.pair_xml_element_name,
value.key_xml_element_name,
value.value_xml_element_name))
elif isinstance(value, _xml_attribute):
real_value = None
if node.hasAttribute(value.xml_element_name):
real_value = node.getAttribute(value.xml_element_name)
if real_value is not None:
setattr(return_obj, name, real_value)
elif isinstance(value, WindowsAzureData):
setattr(return_obj,
name,
_fill_instance_child(node, name, value.__class__))
elif isinstance(value, dict):
setattr(return_obj,
name,
_fill_dict(node, _get_serialization_name(name)))
elif isinstance(value, _Base64String):
value = _fill_data_minidom(node, name, '')
if value is not None:
value = _decode_base64_to_text(value)
# always set the attribute, so we don't end up returning an object
# with type _Base64String
setattr(return_obj, name, value)
else:
value = _fill_data_minidom(node, name, value)
if value is not None:
setattr(return_obj, name, value)
def _parse_response_body_from_xml_node(node, return_type):
'''
parse the xml and fill all the data into a class of return_type
'''
return_obj = return_type()
_fill_data_to_return_object(node, return_obj)
return return_obj
def _parse_response_body_from_xml_text(respbody, return_type):
'''
parse the xml and fill all the data into a class of return_type
'''
doc = minidom.parseString(respbody)
return_obj = return_type()
xml_name = return_type._xml_name if hasattr(return_type, '_xml_name') else return_type.__name__
for node in _get_child_nodes(doc, xml_name):
_fill_data_to_return_object(node, return_obj)
return return_obj
def _parse_response_body_from_service_resources_xml_text(respbody, return_type):
'''
parse the xml and fill all the data into a class of return_type
'''
doc = minidom.parseString(respbody)
return_obj = _list_of(return_type)
for node in _get_children_from_path(doc, "ServiceResources", "ServiceResource"):
local_obj = return_type()
_fill_data_to_return_object(node, local_obj)
return_obj.append(local_obj)
return return_obj
class _dict_of(dict):
"""a dict which carries with it the xml element names for key,val.
Used for deserializaion and construction of the lists"""
def __init__(self, pair_xml_element_name, key_xml_element_name,
value_xml_element_name):
self.pair_xml_element_name = pair_xml_element_name
self.key_xml_element_name = key_xml_element_name
self.value_xml_element_name = value_xml_element_name
super(_dict_of, self).__init__()
class _list_of(list):
"""a list which carries with it the type that's expected to go in it.
    Used for deserialization and construction of the lists"""
def __init__(self, list_type, xml_element_name=None):
self.list_type = list_type
if xml_element_name is None:
self.xml_element_name = list_type.__name__
else:
self.xml_element_name = xml_element_name
super(_list_of, self).__init__()
class _scalar_list_of(list):
"""a list of scalar types which carries with it the type that's
expected to go in it along with its xml element name.
    Used for deserialization and construction of the lists"""
def __init__(self, list_type, xml_element_name):
self.list_type = list_type
self.xml_element_name = xml_element_name
super(_scalar_list_of, self).__init__()
class _xml_attribute:
"""a accessor to XML attributes
expected to go in it along with its xml element name.
Used for deserialization and construction"""
def __init__(self, xml_element_name):
self.xml_element_name = xml_element_name
def _update_request_uri_query_local_storage(request, use_local_storage):
''' create correct uri and query for the request '''
uri, query = _update_request_uri_query(request)
if use_local_storage:
return '/' + DEV_ACCOUNT_NAME + uri, query
return uri, query
def _update_request_uri_query(request):
'''pulls the query string out of the URI and moves it into
the query portion of the request object. If there are already
query parameters on the request the parameters in the URI will
appear after the existing parameters'''
if '?' in request.path:
request.path, _, query_string = request.path.partition('?')
if query_string:
query_params = query_string.split('&')
for query in query_params:
if '=' in query:
name, _, value = query.partition('=')
request.query.append((name, value))
request.path = url_quote(request.path, '/()$=\',')
# add encoded queries to request.path.
if request.query:
request.path += '?'
for name, value in request.query:
if value is not None:
request.path += name + '=' + url_quote(value, '/()$=\',') + '&'
request.path = request.path[:-1]
return request.path, request.query
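# Illustrative sketch (request values assumed): with request.path set to
# '/myqueue?comp=metadata' and request.query == [], this returns
# ('/myqueue?comp=metadata', [('comp', 'metadata')]) -- the query string is
# moved into request.query and then re-encoded back onto the path.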
def _dont_fail_on_exist(error):
''' don't throw exception if the resource exists.
This is called by create_* APIs with fail_on_exist=False'''
if isinstance(error, WindowsAzureConflictError):
return False
else:
raise error
def _dont_fail_not_exist(error):
    ''' don't throw exception if the resource doesn't exist.
    This is called by delete_* APIs with fail_not_exist=False'''
if isinstance(error, WindowsAzureMissingResourceError):
return False
else:
raise error
def _general_error_handler(http_error):
''' Simple error handler for azure.'''
if http_error.status == 409:
raise WindowsAzureConflictError(
_ERROR_CONFLICT.format(str(http_error)))
elif http_error.status == 404:
raise WindowsAzureMissingResourceError(
_ERROR_NOT_FOUND.format(str(http_error)))
else:
if http_error.respbody is not None:
raise WindowsAzureError(
_ERROR_UNKNOWN.format(str(http_error)) + '\n' + \
http_error.respbody.decode('utf-8'))
else:
raise WindowsAzureError(_ERROR_UNKNOWN.format(str(http_error)))
def _parse_response_for_dict(response):
    ''' Extracts name-values from the response header. Filters out the
    standard http headers.'''
if response is None:
return None
http_headers = ['server', 'date', 'location', 'host',
'via', 'proxy-connection', 'connection']
return_dict = HeaderDict()
if response.headers:
for name, value in response.headers:
if not name.lower() in http_headers:
return_dict[name] = value
return return_dict
def _parse_response_for_dict_prefix(response, prefixes):
    ''' Extracts name-values for names starting with prefix from the response
    header. Filters out the standard http headers.'''
if response is None:
return None
return_dict = {}
orig_dict = _parse_response_for_dict(response)
if orig_dict:
for name, value in orig_dict.items():
for prefix_value in prefixes:
if name.lower().startswith(prefix_value.lower()):
return_dict[name] = value
break
return return_dict
else:
return None
def _parse_response_for_dict_filter(response, filter):
    ''' Extracts name-values for names in filter from the response header.
    Filters out the standard http headers.'''
if response is None:
return None
return_dict = {}
orig_dict = _parse_response_for_dict(response)
if orig_dict:
for name, value in orig_dict.items():
if name.lower() in filter:
return_dict[name] = value
return return_dict
else:
return None
def _sign_string(key, string_to_sign, key_is_base64=True):
if key_is_base64:
key = _decode_base64_to_bytes(key)
else:
if isinstance(key, _unicode_type):
key = key.encode('utf-8')
if isinstance(string_to_sign, _unicode_type):
string_to_sign = string_to_sign.encode('utf-8')
signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)
digest = signed_hmac_sha256.digest()
encoded_digest = _encode_base64(digest)
return encoded_digest
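# Minimal sketch of the signer above (sample values are arbitrary; 'a2V5' is
# base64 for 'key'):
# >>> _sign_string('a2V5', 'string-to-sign')
# returns the base64-encoded HMAC-SHA256 digest as a unicode string.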
| apache-2.0 | 3,546,364,802,924,221,000 | 1,175,748,016,276,027,100 | 33.179044 | 181 | 0.625365 | false |
koobonil/Boss2D | Boss2D/addon/_old/webrtc-qt5.11.2_for_boss/tools_webrtc/cpu/cpu_mon.py | 6 | 2057 | #!/usr/bin/env python
#
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import psutil
import sys
import numpy
from matplotlib import pyplot
class CpuSnapshot(object):
def __init__(self, label):
self.label = label
self.samples = []
def Capture(self, sample_count):
print ('Capturing %d CPU samples for %s...' %
((sample_count - len(self.samples)), self.label))
while len(self.samples) < sample_count:
self.samples.append(psutil.cpu_percent(1.0, False))
def Text(self):
return ('%s: avg=%s, median=%s, min=%s, max=%s' %
(self.label, numpy.average(self.samples),
numpy.median(self.samples),
numpy.min(self.samples), numpy.max(self.samples)))
def Max(self):
return numpy.max(self.samples)
def GrabCpuSamples(sample_count):
print 'Label for snapshot (enter to quit): '
label = raw_input().strip()
if len(label) == 0:
return None
snapshot = CpuSnapshot(label)
snapshot.Capture(sample_count)
return snapshot
def main():
print 'How many seconds to capture per snapshot (enter for 60)?'
sample_count = raw_input().strip()
if len(sample_count) > 0 and int(sample_count) > 0:
sample_count = int(sample_count)
else:
print 'Defaulting to 60 samples.'
sample_count = 60
snapshots = []
while True:
snapshot = GrabCpuSamples(sample_count)
if snapshot == None:
break
snapshots.append(snapshot)
if len(snapshots) == 0:
print 'no samples captured'
return -1
pyplot.title('CPU usage')
for s in snapshots:
pyplot.plot(s.samples, label=s.Text(), linewidth=2)
pyplot.legend()
pyplot.show()
return 0
if __name__ == '__main__':
sys.exit(main())
| mit | -1,000,090,284,158,523,300 | -5,701,861,692,834,842,000 | 23.783133 | 69 | 0.664074 | false |
spark-test/spark | examples/src/main/python/mllib/kmeans.py | 51 | 1552 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A K-means clustering program using MLlib.
This example requires NumPy (http://www.numpy.org/).
"""
from __future__ import print_function
import sys
import numpy as np
from pyspark import SparkContext
from pyspark.mllib.clustering import KMeans
def parseVector(line):
return np.array([float(x) for x in line.split(' ')])
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: kmeans <file> <k>", file=sys.stderr)
sys.exit(-1)
sc = SparkContext(appName="KMeans")
lines = sc.textFile(sys.argv[1])
data = lines.map(parseVector)
k = int(sys.argv[2])
model = KMeans.train(data, k)
print("Final centers: " + str(model.clusterCenters))
print("Total Cost: " + str(model.computeCost(data)))
sc.stop()
| apache-2.0 | 3,220,742,271,165,850,000 | 4,654,459,632,517,396,000 | 32.021277 | 74 | 0.711985 | false |
PriceChild/ansible | lib/ansible/modules/windows/win_user.py | 56 | 4540 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Matt Martz <matt@sivel.net>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: win_user
version_added: "1.7"
short_description: Manages local Windows user accounts
description:
- Manages local Windows user accounts
options:
name:
description:
- Name of the user to create, remove or modify.
required: true
fullname:
description:
- Full name of the user
required: false
default: null
version_added: "1.9"
description:
description:
- Description of the user
required: false
default: null
version_added: "1.9"
password:
description:
- Optionally set the user's password to this (plain text) value.
required: false
default: null
update_password:
description:
- C(always) will update passwords if they differ. C(on_create) will
only set the password for newly created users.
required: false
choices: [ 'always', 'on_create' ]
default: always
version_added: "1.9"
password_expired:
description:
- C(yes) will require the user to change their password at next login.
C(no) will clear the expired password flag.
required: false
choices: [ 'yes', 'no' ]
default: null
version_added: "1.9"
password_never_expires:
description:
- C(yes) will set the password to never expire. C(no) will allow the
password to expire.
required: false
choices: [ 'yes', 'no' ]
default: null
version_added: "1.9"
user_cannot_change_password:
description:
- C(yes) will prevent the user from changing their password. C(no) will
allow the user to change their password.
required: false
choices: [ 'yes', 'no' ]
default: null
version_added: "1.9"
account_disabled:
description:
- C(yes) will disable the user account. C(no) will clear the disabled
flag.
required: false
choices: [ 'yes', 'no' ]
default: null
version_added: "1.9"
account_locked:
description:
- C(no) will unlock the user account if locked.
required: false
choices: [ 'no' ]
default: null
version_added: "1.9"
groups:
description:
      - Adds or removes the user from this comma-separated list of groups,
depending on the value of I(groups_action). When I(groups_action) is
C(replace) and I(groups) is set to the empty string ('groups='), the
user is removed from all groups.
required: false
version_added: "1.9"
groups_action:
description:
- If C(replace), the user is added as a member of each group in
I(groups) and removed from any other groups. If C(add), the user is
added to each group in I(groups) where not already a member. If
C(remove), the user is removed from each group in I(groups).
required: false
choices: [ "replace", "add", "remove" ]
default: "replace"
version_added: "1.9"
state:
description:
- When C(present), creates or updates the user account. When C(absent),
removes the user account if it exists. When C(query) (new in 1.9),
retrieves the user account details without making any changes.
required: false
choices:
- present
- absent
- query
default: present
aliases: []
author:
- "Paul Durivage (@angstwad)"
- "Chris Church (@cchurch)"
'''
EXAMPLES = r'''
- name: Ensure user bob is present
win_user:
name: bob
password: B0bP4ssw0rd
state: present
groups:
- Users
- name: Ensure user bob is absent
win_user:
name: bob
state: absent
'''
| gpl-3.0 | 8,970,779,632,370,961,000 | 7,824,890,899,443,801,000 | 28.673203 | 78 | 0.649559 | false |
frdb194/django | tests/managers_regress/models.py | 245 | 3566 | """
Various edge-cases for model managers.
"""
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import force_text, python_2_unicode_compatible
class OnlyFred(models.Manager):
def get_queryset(self):
return super(OnlyFred, self).get_queryset().filter(name='fred')
class OnlyBarney(models.Manager):
def get_queryset(self):
return super(OnlyBarney, self).get_queryset().filter(name='barney')
class Value42(models.Manager):
def get_queryset(self):
return super(Value42, self).get_queryset().filter(value=42)
class AbstractBase1(models.Model):
name = models.CharField(max_length=50)
class Meta:
abstract = True
# Custom managers
manager1 = OnlyFred()
manager2 = OnlyBarney()
objects = models.Manager()
class AbstractBase2(models.Model):
value = models.IntegerField()
class Meta:
abstract = True
# Custom manager
restricted = Value42()
# No custom manager on this class to make sure the default case doesn't break.
class AbstractBase3(models.Model):
comment = models.CharField(max_length=50)
class Meta:
abstract = True
@python_2_unicode_compatible
class Parent(models.Model):
name = models.CharField(max_length=50)
manager = OnlyFred()
def __str__(self):
return self.name
# Managers from base classes are inherited and, if no manager is specified
# *and* the parent has a manager specified, the first one (in the MRO) will
# become the default.
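# A hedged sketch of the expected effect (per the comment above, not asserted
# by the original code): for Child1 below, manager1, manager2 and objects are
# inherited from AbstractBase1, and manager1 (OnlyFred) becomes the default,
# so Child1._default_manager filters to name='fred'.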
@python_2_unicode_compatible
class Child1(AbstractBase1):
data = models.CharField(max_length=25)
def __str__(self):
return self.data
@python_2_unicode_compatible
class Child2(AbstractBase1, AbstractBase2):
data = models.CharField(max_length=25)
def __str__(self):
return self.data
@python_2_unicode_compatible
class Child3(AbstractBase1, AbstractBase3):
data = models.CharField(max_length=25)
def __str__(self):
return self.data
@python_2_unicode_compatible
class Child4(AbstractBase1):
data = models.CharField(max_length=25)
# Should be the default manager, although the parent managers are
# inherited.
default = models.Manager()
def __str__(self):
return self.data
@python_2_unicode_compatible
class Child5(AbstractBase3):
name = models.CharField(max_length=25)
default = OnlyFred()
objects = models.Manager()
def __str__(self):
return self.name
# Will inherit managers from AbstractBase1, but not Child4.
class Child6(Child4):
value = models.IntegerField()
# Will not inherit default manager from parent.
class Child7(Parent):
pass
# RelatedManagers
@python_2_unicode_compatible
class RelatedModel(models.Model):
test_gfk = GenericRelation('RelationModel', content_type_field='gfk_ctype', object_id_field='gfk_id')
exact = models.NullBooleanField()
def __str__(self):
return force_text(self.pk)
@python_2_unicode_compatible
class RelationModel(models.Model):
fk = models.ForeignKey(RelatedModel, models.CASCADE, related_name='test_fk')
m2m = models.ManyToManyField(RelatedModel, related_name='test_m2m')
gfk_ctype = models.ForeignKey(ContentType, models.SET_NULL, null=True)
gfk_id = models.IntegerField(null=True)
gfk = GenericForeignKey(ct_field='gfk_ctype', fk_field='gfk_id')
def __str__(self):
return force_text(self.pk)
| bsd-3-clause | 5,509,925,491,962,269,000 | 8,277,589,225,642,906,000 | 22.932886 | 105 | 0.700224 | false |
hkupty/python-mode | pymode/libs2/rope/contrib/finderrors.py | 93 | 2948 | """Finding bad name and attribute accesses
`find_errors` function can be used to find possible bad name and
attribute accesses. As an example::
errors = find_errors(project, project.get_resource('mod.py'))
for error in errors:
print '%s: %s' % (error.lineno, error.error)
prints possible errors for ``mod.py`` file.
TODO:
* use task handles
* reporting names at most once
* attributes of extension modules that don't appear in
extension_modules project config can be ignored
* not calling `PyScope.get_inner_scope_for_line()` if it is a
bottleneck; needs profiling
* not reporting occurrences where rope cannot infer the object
* rope saves multiple objects for some of the names in its objectdb
  use all of them so as not to give false positives
* ... ;-)
"""
from rope.base import ast, evaluate, pyobjects
def find_errors(project, resource):
"""Find possible bad name and attribute accesses
It returns a list of `Error`\s.
"""
pymodule = project.pycore.resource_to_pyobject(resource)
finder = _BadAccessFinder(pymodule)
ast.walk(pymodule.get_ast(), finder)
return finder.errors
class _BadAccessFinder(object):
def __init__(self, pymodule):
self.pymodule = pymodule
self.scope = pymodule.get_scope()
self.errors = []
def _Name(self, node):
if isinstance(node.ctx, (ast.Store, ast.Param)):
return
scope = self.scope.get_inner_scope_for_line(node.lineno)
pyname = scope.lookup(node.id)
if pyname is None:
self._add_error(node, 'Unresolved variable')
elif self._is_defined_after(scope, pyname, node.lineno):
self._add_error(node, 'Defined later')
def _Attribute(self, node):
if not isinstance(node.ctx, ast.Store):
scope = self.scope.get_inner_scope_for_line(node.lineno)
pyname = evaluate.eval_node(scope, node.value)
if pyname is not None and \
pyname.get_object() != pyobjects.get_unknown():
if node.attr not in pyname.get_object():
self._add_error(node, 'Unresolved attribute')
ast.walk(node.value, self)
def _add_error(self, node, msg):
if isinstance(node, ast.Attribute):
name = node.attr
else:
name = node.id
if name != 'None':
error = Error(node.lineno, msg + ' ' + name)
self.errors.append(error)
def _is_defined_after(self, scope, pyname, lineno):
location = pyname.get_definition_location()
if location is not None and location[1] is not None:
if location[0] == self.pymodule and \
lineno <= location[1] <= scope.get_end():
return True
class Error(object):
def __init__(self, lineno, error):
self.lineno = lineno
self.error = error
def __str__(self):
return '%s: %s' % (self.lineno, self.error)
| lgpl-3.0 | -5,484,022,437,986,476,000 | 1,644,246,850,749,113,000 | 31.395604 | 68 | 0.625509 | false |
c-goosen/ctpug_11_july | flask/__init__.py | 1 | 4174 | from flask import Flask
from flask_bootstrap import Bootstrap
from flask import render_template
import bootstrap
import xmlrpclib
from io import BytesIO
import base64
app = Flask(__name__)
username = 'username' #the user
pwd = 'password' #the password of the user
dbname = 'ctpug' #the database
sock_common = xmlrpclib.ServerProxy ('http://127.0.0.1:8069/xmlrpc/common')
uid = sock_common.login(dbname, username, pwd)
#replace localhost with the address of the server
sock = xmlrpclib.ServerProxy('http://127.0.0.1:8069/xmlrpc/object')
def test_connection(username, pwd, dbname):
    connection_reply = 'Connection to Odoo - '
    args = []  # query clause
    ids = sock.execute(dbname, uid, pwd, 'res.partner', 'search', args)
    fields = ['name', 'id', 'email']  # fields to read
    data = sock.execute(dbname, uid, pwd, 'res.partner', 'read', ids, fields)
    if data[0]['name'] == 'admin':
        connection_reply += 'successful'
    else:
        connection_reply += 'not successful'
    return connection_reply
def get_products(username,pwd,dbname):
args = [] #query clause
ids = sock.execute(dbname, uid, pwd, 'product.product', 'search', args)
fields = ['id', 'lst_price', 'qty_available', 'product_tmpl_id'] #fields to read
data = sock.execute(dbname, uid, pwd, 'product.product', 'read', ids, fields)
return data
def get_product_templates(username,pwd,dbname, args):
args = args or [] #query clause
ids = sock.execute(dbname, uid, pwd, 'product.template', 'search', args)
fields = ['id', 'name', 'image_medium'] #fields to read
data = sock.execute(dbname, uid, pwd, 'product.template', 'read', ids, fields)
return data
def get_company_currency(username,pwd,dbname):
args = []
ids = sock.execute(dbname, uid, pwd, 'res.company', 'search', [('id','=',1)])
fields = ['currency_id'] #fields to read
company = sock.execute(dbname, uid, pwd, 'res.company', 'read', ids, fields)
ids = sock.execute(dbname, uid, pwd, 'res.currency', 'search', [('id','=',company[0]['currency_id'][0])])
fields = ['symbol']
currency_symbol = sock.execute(dbname, uid, pwd, 'res.currency', 'read', ids, fields)
return currency_symbol[0]['symbol']
@app.route('/products')
def products():
    product_output = 'List of products <br/><br/>'
product_product = get_products(username,pwd,dbname)
#product_template = get_product_templates(username,pwd,dbname)
count = 0
for x in product_product:
args = [('id', '=', x['product_tmpl_id'][0])]
product_template = get_product_templates(username,pwd,dbname,args)
#product_output = product_output + product_template[0]['name']
#product_output = ''+x['product_tmpl_id']
#for y in product_template:
#if x['product_tmpl_id'] == y['id']:
#product_output = '\n |' + product_output + str(x['id']) + y['name'] + "<img style='display:block; width:100px;height:100px;' id='base64image' src='data:image/jpeg;base64, %s'/>" % y['image_medium'] +' | \n'
if product_template[0]['image_medium']:
product_output += '\n' + str(product_product[count]['id']) +' ' + product_template[0]['name'] + ' ' + get_company_currency(username,pwd,dbname) + str(product_product[count]['lst_price']) + "<img style='display:block; width:100px;height:100px;' id='base64image' src='data:image/jpeg;base64, %s'/>" % product_template[0]['image_medium'] +' \n'
count += 1
return product_output
#return 'List of products %s' % data[0]['id']
@app.route('/')
def index():
connection_reply = 'Connection to Odoo - '
args = [] #query clauses
ids = sock.execute(dbname, uid, pwd, 'res.partner', 'search', args)
fields = ['name', 'id', 'email'] #fields to read
data = sock.execute(dbname, uid, pwd, 'res.partner', 'read', ids, fields)
#return 'Hello %s' %data[0]
if data[0]['id'] == 3:
connection_reply = '%s successful' % connection_reply
else:
connection_reply = '%s not successful' % connection_reply
return connection_reply
#return render_template('index.html', title='Home', connection_reply=connection_reply)
if __name__ == '__main__':
    Bootstrap(app)
    app.run(debug=True)
| cc0-1.0 | -6,877,756,599,009,038,000 | 5,595,582,017,804,237,000 | 35.938053 | 344 | 0.674653 | false |
ahojjati/letsencrypt | letsencrypt/account.py | 10 | 7268 | """Creates ACME accounts for server."""
import datetime
import hashlib
import logging
import os
import socket
from cryptography.hazmat.primitives import serialization
import pyrfc3339
import pytz
import zope.component
from acme import fields as acme_fields
from acme import jose
from acme import messages
from letsencrypt import errors
from letsencrypt import interfaces
from letsencrypt import le_util
logger = logging.getLogger(__name__)
class Account(object): # pylint: disable=too-few-public-methods
"""ACME protocol registration.
:ivar .RegistrationResource regr: Registration Resource
:ivar .JWK key: Authorized Account Key
    :ivar .Meta meta: Account metadata
:ivar str id: Globally unique account identifier.
"""
class Meta(jose.JSONObjectWithFields):
"""Account metadata
:ivar datetime.datetime creation_dt: Creation date and time (UTC).
:ivar str creation_host: FQDN of host, where account has been created.
.. note:: ``creation_dt`` and ``creation_host`` are useful in
cross-machine migration scenarios.
"""
creation_dt = acme_fields.RFC3339Field("creation_dt")
creation_host = jose.Field("creation_host")
def __init__(self, regr, key, meta=None):
self.key = key
self.regr = regr
self.meta = self.Meta(
# pyrfc3339 drops microseconds, make sure __eq__ is sane
creation_dt=datetime.datetime.now(
tz=pytz.UTC).replace(microsecond=0),
creation_host=socket.getfqdn()) if meta is None else meta
self.id = hashlib.md5( # pylint: disable=invalid-name
self.key.key.public_key().public_bytes(
encoding=serialization.Encoding.DER,
format=serialization.PublicFormat.SubjectPublicKeyInfo)
).hexdigest()
# Implementation note: Email? Multiple accounts can have the
# same email address. Registration URI? Assigned by the
        # server, not guaranteed to be stable over time, nor can a
        # canonical URI be generated. The ACME protocol doesn't allow the
        # account key (and thus its fingerprint) to be updated...
@property
def slug(self):
"""Short account identification string, useful for UI."""
return "{1}@{0} ({2})".format(pyrfc3339.generate(
self.meta.creation_dt), self.meta.creation_host, self.id[:4])
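    # e.g. (illustrative, all values assumed):
    # "host.example.org@2015-07-01T00:00:00Z (1a2b)"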
def __repr__(self):
return "<{0}({1})>".format(self.__class__.__name__, self.id)
def __eq__(self, other):
return (isinstance(other, self.__class__) and
self.key == other.key and self.regr == other.regr and
self.meta == other.meta)
def report_new_account(acc, config):
"""Informs the user about their new Let's Encrypt account."""
reporter = zope.component.queryUtility(interfaces.IReporter)
if reporter is None:
return
reporter.add_message(
"Your account credentials have been saved in your Let's Encrypt "
"configuration directory at {0}. You should make a secure backup "
"of this folder now. This configuration directory will also "
"contain certificates and private keys obtained by Let's Encrypt "
"so making regular backups of this folder is ideal.".format(
config.config_dir),
reporter.MEDIUM_PRIORITY, True)
if acc.regr.body.emails:
recovery_msg = ("If you lose your account credentials, you can "
"recover through e-mails sent to {0}.".format(
", ".join(acc.regr.body.emails)))
reporter.add_message(recovery_msg, reporter.HIGH_PRIORITY, True)
class AccountMemoryStorage(interfaces.AccountStorage):
"""In-memory account strage."""
def __init__(self, initial_accounts=None):
self.accounts = initial_accounts if initial_accounts is not None else {}
def find_all(self):
return self.accounts.values()
def save(self, account):
if account.id in self.accounts:
logger.debug("Overwriting account: %s", account.id)
self.accounts[account.id] = account
def load(self, account_id):
try:
return self.accounts[account_id]
except KeyError:
raise errors.AccountNotFound(account_id)
class AccountFileStorage(interfaces.AccountStorage):
"""Accounts file storage.
:ivar .IConfig config: Client configuration
"""
def __init__(self, config):
le_util.make_or_verify_dir(config.accounts_dir, 0o700, os.geteuid())
self.config = config
def _account_dir_path(self, account_id):
return os.path.join(self.config.accounts_dir, account_id)
@classmethod
def _regr_path(cls, account_dir_path):
return os.path.join(account_dir_path, "regr.json")
@classmethod
def _key_path(cls, account_dir_path):
return os.path.join(account_dir_path, "private_key.json")
@classmethod
def _metadata_path(cls, account_dir_path):
return os.path.join(account_dir_path, "meta.json")
def find_all(self):
try:
candidates = os.listdir(self.config.accounts_dir)
except OSError:
return []
accounts = []
for account_id in candidates:
try:
accounts.append(self.load(account_id))
except errors.AccountStorageError:
logger.debug("Account loading problem", exc_info=True)
return accounts
def load(self, account_id):
account_dir_path = self._account_dir_path(account_id)
if not os.path.isdir(account_dir_path):
raise errors.AccountNotFound(
"Account at %s does not exist" % account_dir_path)
try:
with open(self._regr_path(account_dir_path)) as regr_file:
regr = messages.RegistrationResource.json_loads(regr_file.read())
with open(self._key_path(account_dir_path)) as key_file:
key = jose.JWK.json_loads(key_file.read())
with open(self._metadata_path(account_dir_path)) as metadata_file:
meta = Account.Meta.json_loads(metadata_file.read())
except IOError as error:
raise errors.AccountStorageError(error)
acc = Account(regr, key, meta)
if acc.id != account_id:
raise errors.AccountStorageError(
"Account ids mismatch (expected: {0}, found: {1}".format(
account_id, acc.id))
return acc
def save(self, account):
account_dir_path = self._account_dir_path(account.id)
le_util.make_or_verify_dir(account_dir_path, 0o700, os.geteuid())
try:
with open(self._regr_path(account_dir_path), "w") as regr_file:
regr_file.write(account.regr.json_dumps())
with le_util.safe_open(self._key_path(account_dir_path),
"w", chmod=0o400) as key_file:
key_file.write(account.key.json_dumps())
with open(self._metadata_path(account_dir_path), "w") as metadata_file:
metadata_file.write(account.meta.json_dumps())
except IOError as error:
raise errors.AccountStorageError(error)
| apache-2.0 | 5,251,193,214,800,577,000 | 5,522,552,677,111,608,000 | 35.522613 | 83 | 0.627408 | false |
juharris/tensorflow | tensorflow/contrib/tensor_forest/python/ops/inference_ops.py | 15 | 2143 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for BrainTree v2 tree evaluation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import load_library
from tensorflow.python.framework import ops
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import tf_logging as logging
INFERENCE_OPS_FILE = '_inference_ops.so'
_inference_ops = None
_ops_lock = threading.Lock()
# TODO(b/31222613): This op may be differentiable, and there may be
# latent bugs here.
ops.NotDifferentiable('TreePredictions')
ops.RegisterShape('TreePredictions')(common_shapes.call_cpp_shape_fn)
# Workaround for the fact that importing tensorflow imports contrib
# (even if a user isn't using this or any other contrib op), but
# there's not yet any guarantee that the shared object exists.
# In which case, "import tensorflow" will always crash, even for users that
# never use contrib.
def Load():
"""Load the inference ops library and return the loaded module."""
with _ops_lock:
global _inference_ops
if not _inference_ops:
ops_path = resource_loader.get_path_to_datafile(INFERENCE_OPS_FILE)
logging.info('data path: %s', ops_path)
_inference_ops = load_library.load_op_library(ops_path)
assert _inference_ops, 'Could not load inference_ops.so'
return _inference_ops
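# Hypothetical usage sketch (the snake_case op name is assumed from the
# 'TreePredictions' registration above, not verified here):
#   inference_ops = Load()
#   predictions = inference_ops.tree_predictions(...)  # args depend on the op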
| apache-2.0 | 2,492,781,251,802,694,000 | -3,494,472,063,937,341,400 | 35.948276 | 80 | 0.729351 | false |
infrared5/massroute-pi | features/steps/bus_approaching_blink_steps.py | 1 | 1614 | from time import sleep
from app.component.modifier.blinker import Blinker
PINS = [0,1,2,3]
DELAY = 0.2
@given('A new Blinker instance provided with a Shifter reference')
def blinker_setup_with_shifter(context):
shifter = context.shifter
blinker = Blinker(shifter)
blinker.set_pins(PINS)
shifter.set_pins = MagicMock(return_value=None)
context.blinker = blinker
@when('Blinker:start() invoked')
def blinker_start(context):
context.blinker.start()
@then('Shifter:set_pins() invoked with 1 once')
def shifter_set_pins_on_once(context):
context.shifter.set_pins.assert_called_once_with(PINS, 1)
@given('A new Blinker instance with 0.2 second delay')
def blinker_setup_with_delay(context):
shifter = context.shifter
blinker = Blinker(shifter, DELAY)
blinker.set_pins(PINS)
shifter.set_pins = MagicMock(return_value=None)
context.blinker = blinker
@when('At least 0.2 seconds have lapsed')
def time_elapsed_two_milliseconds(context):
sleep(0.22)
@when('At least 0.4 seconds have lapsed')
def time_elapsed_four_milliseconds(context):
sleep(0.42)
@when('Blinker:stop() invoked')
def blinker_stop(context):
context.blinker.stop()
@then('Shifter:set_pins() invoked with 0')
def shifter_set_pins_off(context):
context.shifter.set_pins.assert_called_with(PINS, 0)
@then('Shifter:set_pins() invoked with 1 twice')
def shifter_set_pins_on_twice(context):
# once for off, twice for on
assert context.shifter.set_pins.call_count == 3
@then('Shifter:set_pins() not called more than once')
def shifter_set_pins_called_once(context):
    context.shifter.set_pins.assert_called_once()
| mit | -5,212,954,521,221,234,000 | 4,122,428,650,584,253,400 | 27.333333 | 66 | 0.740397 | false |
gregdek/ansible | lib/ansible/modules/network/cloudengine/ce_snmp_target_host.py | 25 | 32025 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_snmp_target_host
version_added: "2.4"
short_description: Manages SNMP target host configuration on HUAWEI CloudEngine switches.
description:
- Manages SNMP target host configurations on HUAWEI CloudEngine switches.
author:
- wangdezhuang (@QijunPan)
options:
version:
description:
- Version(s) Supported by SNMP Engine.
choices: ['none', 'v1', 'v2c', 'v3', 'v1v2c', 'v1v3', 'v2cv3', 'all']
connect_port:
description:
      - UDP port used by the SNMP agent to connect to the network management system.
host_name:
description:
- Unique name to identify target host entry.
address:
description:
- Network Address.
notify_type:
description:
- To configure notify type as trap or inform.
choices: ['trap','inform']
vpn_name:
description:
- VPN instance Name.
recv_port:
description:
- UDP Port number used by network management to receive alarm messages.
security_model:
description:
- Security Model.
choices: ['v1','v2c', 'v3']
security_name:
description:
- Security Name.
security_name_v3:
description:
- Security Name V3.
security_level:
description:
- Security level indicating whether to use authentication and encryption.
choices: ['noAuthNoPriv','authentication', 'privacy']
is_public_net:
description:
- To enable or disable Public Net-manager for target Host.
default: no_use
choices: ['no_use','true','false']
interface_name:
description:
- Name of the interface to send the trap message.
'''
EXAMPLES = '''
- name: CloudEngine snmp target host test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Config SNMP version"
ce_snmp_target_host:
state: present
version: v2cv3
provider: "{{ cli }}"
- name: "Config SNMP target host"
ce_snmp_target_host:
state: present
host_name: test1
address: 1.1.1.1
notify_type: trap
vpn_name: js
security_model: v2c
security_name: wdz
provider: "{{ cli }}"
'''
RETURN = '''
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"address": "10.135.182.158", "host_name": "test2",
"notify_type": "trap", "security_level": "authentication",
"security_model": "v3", "security_name_v3": "wdz",
"state": "present", "vpn_name": "js"}
existing:
description: k/v pairs of existing aaa server
returned: always
type: dict
sample: {}
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample: {"target host info": [{"address": "10.135.182.158", "domain": "snmpUDPDomain",
"nmsName": "test2", "notifyType": "trap",
"securityLevel": "authentication", "securityModel": "v3",
"securityNameV3": "wdz", "vpnInstanceName": "js"}]}
updates:
description: command sent to the device
returned: always
type: list
sample: ["snmp-agent target-host host-name test2 trap address udp-domain 10.135.182.158 vpn-instance js params securityname wdz v3 authentication"]
'''
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, \
ce_argument_spec, get_config, load_config, check_ip_addr
# get snmp version
CE_GET_SNMP_VERSION = """
<filter type="subtree">
<snmp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<engine>
<version></version>
</engine>
</snmp>
</filter>
"""
# merge snmp version
CE_MERGE_SNMP_VERSION = """
<config>
<snmp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<engine operation="merge">
<version>%s</version>
</engine>
</snmp>
</config>
"""
# get snmp target host
CE_GET_SNMP_TARGET_HOST_HEADER = """
<filter type="subtree">
<snmp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<targetHosts>
<targetHost>
<nmsName></nmsName>
"""
CE_GET_SNMP_TARGET_HOST_TAIL = """
</targetHost>
</targetHosts>
</snmp>
</filter>
"""
# merge snmp target host
CE_MERGE_SNMP_TARGET_HOST_HEADER = """
<config>
<snmp xmlns="http://www.huawei.com/netconf/vrp" format-version="1.0" content-version="1.0">
<targetHosts>
<targetHost operation="merge">
<nmsName>%s</nmsName>
"""
CE_MERGE_SNMP_TARGET_HOST_TAIL = """
</targetHost>
</targetHosts>
</snmp>
</config>
"""
# create snmp target host
CE_CREATE_SNMP_TARGET_HOST_HEADER = """
<config>
<snmp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<targetHosts>
<targetHost operation="create">
<nmsName>%s</nmsName>
"""
CE_CREATE_SNMP_TARGET_HOST_TAIL = """
</targetHost>
</targetHosts>
</snmp>
</config>
"""
# delete snmp target host
CE_DELETE_SNMP_TARGET_HOST_HEADER = """
<config>
<snmp xmlns="http://www.huawei.com/netconf/vrp" format-version="1.0" content-version="1.0">
<targetHosts>
<targetHost operation="delete">
<nmsName>%s</nmsName>
"""
CE_DELETE_SNMP_TARGET_HOST_TAIL = """
</targetHost>
</targetHosts>
</snmp>
</config>
"""
INTERFACE_TYPE = ['ethernet', 'eth-trunk', 'tunnel', 'null', 'loopback',
'vlanif', '100ge', '40ge', 'mtunnel', '10ge', 'ge', 'meth', 'vbdif', 'nve']
class SnmpTargetHost(object):
""" Manages SNMP target host configuration """
def __init__(self, **kwargs):
""" Class init """
# module
argument_spec = kwargs["argument_spec"]
self.spec = argument_spec
required_together = [("address", "notify_type"), ("address", "notify_type")]
required_if = [
["security_model", "v1", ["security_name"]],
["security_model", "v2c", ["security_name"]],
["security_model", "v3", ["security_name_v3"]]
]
self.module = AnsibleModule(
argument_spec=argument_spec,
required_together=required_together,
required_if=required_if,
supports_check_mode=True
)
# module args
self.state = self.module.params['state']
self.version = self.module.params['version']
self.connect_port = self.module.params['connect_port']
self.host_name = self.module.params['host_name']
self.domain = "snmpUDPDomain"
self.address = self.module.params['address']
self.notify_type = self.module.params['notify_type']
self.vpn_name = self.module.params['vpn_name']
self.recv_port = self.module.params['recv_port']
self.security_model = self.module.params['security_model']
self.security_name = self.module.params['security_name']
self.security_name_v3 = self.module.params['security_name_v3']
self.security_level = self.module.params['security_level']
self.is_public_net = self.module.params['is_public_net']
self.interface_name = self.module.params['interface_name']
# config
self.cur_cli_cfg = dict()
self.cur_netconf_cfg = dict()
self.end_netconf_cfg = dict()
# state
self.changed = False
self.updates_cmd = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
def netconf_get_config(self, conf_str):
""" Get configure by netconf """
xml_str = get_nc_config(self.module, conf_str)
return xml_str
def netconf_set_config(self, conf_str):
""" Set configure by netconf """
xml_str = set_nc_config(self.module, conf_str)
return xml_str
def check_cli_args(self):
""" Check invalid cli args """
        if self.connect_port:
            # Valid values are the default SNMP port 161 or any port in 1025-65535.
            if int(self.connect_port) != 161 and (int(self.connect_port) > 65535 or int(self.connect_port) < 1025):
self.module.fail_json(
msg='Error: The value of connect_port %s is out of [161, 1025 - 65535].' % self.connect_port)
def check_netconf_args(self, result):
""" Check invalid netconf args """
need_cfg = True
same_flag = True
delete_flag = False
result["target_host_info"] = []
if self.host_name:
if len(self.host_name) > 32 or len(self.host_name) < 1:
self.module.fail_json(
msg='Error: The len of host_name is out of [1 - 32].')
if self.vpn_name and self.is_public_net != 'no_use':
if self.is_public_net == "true":
self.module.fail_json(
                    msg='Error: vpn_name and is_public_net cannot be configured at the same time.')
conf_str = CE_GET_SNMP_TARGET_HOST_HEADER
if self.domain:
conf_str += "<domain></domain>"
if self.address:
if not check_ip_addr(ipaddr=self.address):
self.module.fail_json(
msg='Error: The host address [%s] is invalid.' % self.address)
conf_str += "<address></address>"
if self.notify_type:
conf_str += "<notifyType></notifyType>"
if self.vpn_name:
if len(self.vpn_name) > 31 or len(self.vpn_name) < 1:
self.module.fail_json(
msg='Error: The len of vpn_name is out of [1 - 31].')
conf_str += "<vpnInstanceName></vpnInstanceName>"
if self.recv_port:
if int(self.recv_port) > 65535 or int(self.recv_port) < 0:
self.module.fail_json(
msg='Error: The value of recv_port is out of [0 - 65535].')
conf_str += "<portNumber></portNumber>"
if self.security_model:
conf_str += "<securityModel></securityModel>"
if self.security_name:
if len(self.security_name) > 32 or len(self.security_name) < 1:
self.module.fail_json(
msg='Error: The len of security_name is out of [1 - 32].')
conf_str += "<securityName></securityName>"
if self.security_name_v3:
if len(self.security_name_v3) > 32 or len(self.security_name_v3) < 1:
self.module.fail_json(
msg='Error: The len of security_name_v3 is out of [1 - 32].')
conf_str += "<securityNameV3></securityNameV3>"
if self.security_level:
conf_str += "<securityLevel></securityLevel>"
if self.is_public_net != 'no_use':
conf_str += "<isPublicNet></isPublicNet>"
if self.interface_name:
if len(self.interface_name) > 63 or len(self.interface_name) < 1:
self.module.fail_json(
msg='Error: The len of interface_name is out of [1 - 63].')
find_flag = False
for item in INTERFACE_TYPE:
if item in self.interface_name:
find_flag = True
break
if not find_flag:
self.module.fail_json(
                    msg='Error: Please input the full name of the interface.')
conf_str += "<interface-name></interface-name>"
conf_str += CE_GET_SNMP_TARGET_HOST_TAIL
recv_xml = self.netconf_get_config(conf_str=conf_str)
if "<data/>" in recv_xml:
if self.state == "present":
same_flag = False
else:
delete_flag = False
else:
xml_str = recv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
target_host_info = root.findall(
"data/snmp/targetHosts/targetHost")
if target_host_info:
for tmp in target_host_info:
tmp_dict = dict()
for site in tmp:
if site.tag in ["nmsName", "domain", "address", "notifyType", "vpnInstanceName",
"portNumber", "securityModel", "securityName", "securityNameV3",
"securityLevel", "isPublicNet", "interface-name"]:
tmp_dict[site.tag] = site.text
result["target_host_info"].append(tmp_dict)
if result["target_host_info"]:
for tmp in result["target_host_info"]:
same_flag = True
if "nmsName" in tmp.keys():
if tmp["nmsName"] != self.host_name:
same_flag = False
else:
delete_flag = True
if "domain" in tmp.keys():
if tmp["domain"] != self.domain:
same_flag = False
if "address" in tmp.keys():
if tmp["address"] != self.address:
same_flag = False
if "notifyType" in tmp.keys():
if tmp["notifyType"] != self.notify_type:
same_flag = False
if "vpnInstanceName" in tmp.keys():
if tmp["vpnInstanceName"] != self.vpn_name:
same_flag = False
if "portNumber" in tmp.keys():
if tmp["portNumber"] != self.recv_port:
same_flag = False
if "securityModel" in tmp.keys():
if tmp["securityModel"] != self.security_model:
same_flag = False
if "securityName" in tmp.keys():
if tmp["securityName"] != self.security_name:
same_flag = False
if "securityNameV3" in tmp.keys():
if tmp["securityNameV3"] != self.security_name_v3:
same_flag = False
if "securityLevel" in tmp.keys():
if tmp["securityLevel"] != self.security_level:
same_flag = False
if "isPublicNet" in tmp.keys():
if tmp["isPublicNet"] != self.is_public_net:
same_flag = False
if "interface-name" in tmp.keys():
if tmp["interface-name"] != self.interface_name:
same_flag = False
if same_flag:
break
if self.state == "present":
need_cfg = True
if same_flag:
need_cfg = False
else:
need_cfg = False
if delete_flag:
need_cfg = True
result["need_cfg"] = need_cfg
def cli_load_config(self, commands):
""" Load configure by cli """
if not self.module.check_mode:
load_config(self.module, commands)
def get_snmp_version(self):
""" Get snmp version """
version = None
conf_str = CE_GET_SNMP_VERSION
recv_xml = self.netconf_get_config(conf_str=conf_str)
if "<data/>" in recv_xml:
pass
else:
xml_str = recv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
version_info = root.find("data/snmp/engine")
if version_info:
for site in version_info:
if site.tag in ["version"]:
version = site.text
return version
def cli_get_connect_port(self):
""" Get connect port by cli """
regular = "| include snmp | include snmp-agent udp-port"
flags = list()
flags.append(regular)
tmp_cfg = get_config(self.module, flags)
return tmp_cfg
def get_proposed(self):
""" Get proposed state """
self.proposed["state"] = self.state
if self.version:
self.proposed["version"] = self.version
if self.connect_port:
self.proposed["connect_port"] = self.connect_port
if self.host_name:
self.proposed["host_name"] = self.host_name
if self.address:
self.proposed["address"] = self.address
if self.notify_type:
self.proposed["notify_type"] = self.notify_type
if self.vpn_name:
self.proposed["vpn_name"] = self.vpn_name
if self.recv_port:
self.proposed["recv_port"] = self.recv_port
if self.security_model:
self.proposed["security_model"] = self.security_model
if self.security_name:
self.proposed["security_name"] = "******"
if self.security_name_v3:
self.proposed["security_name_v3"] = self.security_name_v3
if self.security_level:
self.proposed["security_level"] = self.security_level
if self.is_public_net != 'no_use':
self.proposed["is_public_net"] = self.is_public_net
if self.interface_name:
self.proposed["interface_name"] = self.interface_name
def get_existing(self):
""" Get existing state """
if self.version:
version = self.get_snmp_version()
if version:
self.cur_cli_cfg["version"] = version
self.existing["version"] = version
if self.connect_port:
tmp_cfg = self.cli_get_connect_port()
if tmp_cfg:
temp_data = tmp_cfg.split(r"udp-port ")
self.cur_cli_cfg["connect port"] = temp_data[1]
self.existing["connect port"] = temp_data[1]
if self.host_name:
self.existing["target host info"] = self.cur_netconf_cfg[
"target_host_info"]
def get_end_state(self):
""" Get end state """
if self.version:
version = self.get_snmp_version()
if version:
self.end_state["version"] = version
if self.connect_port:
tmp_cfg = self.cli_get_connect_port()
if tmp_cfg:
temp_data = tmp_cfg.split(r"udp-port ")
self.end_state["connect port"] = temp_data[1]
if self.host_name:
self.end_state["target host info"] = self.end_netconf_cfg[
"target_host_info"]
def config_version_cli(self):
""" Config version by cli """
if "disable" in self.cur_cli_cfg["version"]:
cmd = "snmp-agent sys-info version %s" % self.version
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
else:
if self.version != self.cur_cli_cfg["version"]:
cmd = "snmp-agent sys-info version %s disable" % self.cur_cli_cfg[
"version"]
self.updates_cmd.append(cmd)
cmd = "snmp-agent sys-info version %s" % self.version
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def undo_config_version_cli(self):
""" Undo config version by cli """
if "disable" in self.cur_cli_cfg["version"]:
pass
else:
cmd = "snmp-agent sys-info version %s disable" % self.cur_cli_cfg[
"version"]
cmds = list()
cmds.append(cmd)
self.updates_cmd.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def config_connect_port_cli(self):
""" Config connect port by cli """
if "connect port" in self.cur_cli_cfg.keys():
if self.cur_cli_cfg["connect port"] == self.connect_port:
pass
else:
cmd = "snmp-agent udp-port %s" % self.connect_port
cmds = list()
cmds.append(cmd)
self.updates_cmd.append(cmd)
self.cli_load_config(cmds)
self.changed = True
else:
cmd = "snmp-agent udp-port %s" % self.connect_port
cmds = list()
cmds.append(cmd)
self.updates_cmd.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def undo_config_connect_port_cli(self):
""" Undo config connect port by cli """
if "connect port" in self.cur_cli_cfg.keys():
if not self.cur_cli_cfg["connect port"]:
pass
else:
cmd = "undo snmp-agent udp-port"
cmds = list()
cmds.append(cmd)
self.updates_cmd.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def merge_snmp_target_host(self):
""" Merge snmp target host operation """
conf_str = CE_MERGE_SNMP_TARGET_HOST_HEADER % self.host_name
if self.domain:
conf_str += "<domain>%s</domain>" % self.domain
if self.address:
conf_str += "<address>%s</address>" % self.address
if self.notify_type:
conf_str += "<notifyType>%s</notifyType>" % self.notify_type
if self.vpn_name:
conf_str += "<vpnInstanceName>%s</vpnInstanceName>" % self.vpn_name
if self.recv_port:
conf_str += "<portNumber>%s</portNumber>" % self.recv_port
if self.security_model:
conf_str += "<securityModel>%s</securityModel>" % self.security_model
if self.security_name:
conf_str += "<securityName>%s</securityName>" % self.security_name
if self.security_name_v3:
conf_str += "<securityNameV3>%s</securityNameV3>" % self.security_name_v3
if self.security_level:
conf_str += "<securityLevel>%s</securityLevel>" % self.security_level
if self.is_public_net != 'no_use':
conf_str += "<isPublicNet>%s</isPublicNet>" % self.is_public_net
if self.interface_name:
conf_str += "<interface-name>%s</interface-name>" % self.interface_name
conf_str += CE_MERGE_SNMP_TARGET_HOST_TAIL
recv_xml = self.netconf_set_config(conf_str=conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(msg='Error: Merge snmp target host failed.')
cmd = "snmp-agent target-host host-name %s " % self.host_name
cmd += "%s " % self.notify_type
cmd += "address udp-domain %s " % self.address
if self.recv_port:
cmd += "udp-port %s " % self.recv_port
if self.interface_name:
cmd += "source %s " % self.interface_name
if self.vpn_name:
cmd += "vpn-instance %s " % self.vpn_name
if self.is_public_net == "true":
cmd += "public-net "
if self.security_model in ["v1", "v2c"] and self.security_name:
cmd += "params securityname %s %s " % (
"******", self.security_model)
if self.security_model == "v3" and self.security_name_v3:
cmd += "params securityname %s %s " % (
self.security_name_v3, self.security_model)
        if self.security_level and self.security_level in ["authentication", "privacy"]:
cmd += "%s" % self.security_level
self.changed = True
self.updates_cmd.append(cmd)
def delete_snmp_target_host(self):
""" Delete snmp target host operation """
conf_str = CE_DELETE_SNMP_TARGET_HOST_HEADER % self.host_name
if self.domain:
conf_str += "<domain>%s</domain>" % self.domain
if self.address:
conf_str += "<address>%s</address>" % self.address
if self.notify_type:
conf_str += "<notifyType>%s</notifyType>" % self.notify_type
if self.vpn_name:
conf_str += "<vpnInstanceName>%s</vpnInstanceName>" % self.vpn_name
if self.recv_port:
conf_str += "<portNumber>%s</portNumber>" % self.recv_port
if self.security_model:
conf_str += "<securityModel>%s</securityModel>" % self.security_model
if self.security_name:
conf_str += "<securityName>%s</securityName>" % self.security_name
if self.security_name_v3:
conf_str += "<securityNameV3>%s</securityNameV3>" % self.security_name_v3
if self.security_level:
conf_str += "<securityLevel>%s</securityLevel>" % self.security_level
if self.is_public_net != 'no_use':
conf_str += "<isPublicNet>%s</isPublicNet>" % self.is_public_net
if self.interface_name:
conf_str += "<interface-name>%s</interface-name>" % self.interface_name
conf_str += CE_DELETE_SNMP_TARGET_HOST_TAIL
recv_xml = self.netconf_set_config(conf_str=conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(msg='Error: Delete snmp target host failed.')
if not self.address:
cmd = "undo snmp-agent target-host host-name %s " % self.host_name
else:
cmd = "undo snmp-agent target-host trap address udp-domain %s " % self.address
if self.recv_port:
cmd += "udp-port %s " % self.recv_port
if self.interface_name:
cmd += "source %s " % self.interface_name
if self.vpn_name:
cmd += "vpn-instance %s " % self.vpn_name
if self.is_public_net == "true":
cmd += "public-net "
if self.security_model in ["v1", "v2c"] and self.security_name:
cmd += "params securityname %s" % "******"
if self.security_model == "v3" and self.security_name_v3:
cmd += "params securityname %s" % self.security_name_v3
self.changed = True
self.updates_cmd.append(cmd)
def merge_snmp_version(self):
""" Merge snmp version operation """
conf_str = CE_MERGE_SNMP_VERSION % self.version
recv_xml = self.netconf_set_config(conf_str=conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(msg='Error: Merge snmp version failed.')
if self.version == "none":
cmd = "snmp-agent sys-info version %s disable" % self.cur_cli_cfg[
"version"]
self.updates_cmd.append(cmd)
elif self.version == "v1v2c":
cmd = "snmp-agent sys-info version v1"
self.updates_cmd.append(cmd)
cmd = "snmp-agent sys-info version v2c"
self.updates_cmd.append(cmd)
elif self.version == "v1v3":
cmd = "snmp-agent sys-info version v1"
self.updates_cmd.append(cmd)
cmd = "snmp-agent sys-info version v3"
self.updates_cmd.append(cmd)
elif self.version == "v2cv3":
cmd = "snmp-agent sys-info version v2c"
self.updates_cmd.append(cmd)
cmd = "snmp-agent sys-info version v3"
self.updates_cmd.append(cmd)
else:
cmd = "snmp-agent sys-info version %s" % self.version
self.updates_cmd.append(cmd)
self.changed = True
def work(self):
""" Main work function """
self.check_cli_args()
self.check_netconf_args(self.cur_netconf_cfg)
self.get_proposed()
self.get_existing()
if self.state == "present":
if self.version:
if self.version != self.cur_cli_cfg["version"]:
self.merge_snmp_version()
if self.connect_port:
self.config_connect_port_cli()
if self.cur_netconf_cfg["need_cfg"]:
self.merge_snmp_target_host()
else:
if self.connect_port:
self.undo_config_connect_port_cli()
if self.cur_netconf_cfg["need_cfg"]:
self.delete_snmp_target_host()
self.check_netconf_args(self.end_netconf_cfg)
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
self.results['updates'] = self.updates_cmd
self.module.exit_json(**self.results)
def main():
""" Module main """
argument_spec = dict(
state=dict(choices=['present', 'absent'], default='present'),
version=dict(choices=['none', 'v1', 'v2c', 'v3',
'v1v2c', 'v1v3', 'v2cv3', 'all']),
connect_port=dict(type='str'),
host_name=dict(type='str'),
address=dict(type='str'),
notify_type=dict(choices=['trap', 'inform']),
vpn_name=dict(type='str'),
recv_port=dict(type='str'),
security_model=dict(choices=['v1', 'v2c', 'v3']),
security_name=dict(type='str', no_log=True),
security_name_v3=dict(type='str'),
security_level=dict(
choices=['noAuthNoPriv', 'authentication', 'privacy']),
is_public_net=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
interface_name=dict(type='str')
)
argument_spec.update(ce_argument_spec)
module = SnmpTargetHost(argument_spec=argument_spec)
module.work()
if __name__ == '__main__':
main()
| gpl-3.0 | 8,378,454,853,730,882,000 | 8,529,923,187,593,897,000 | 34.742188 | 151 | 0.533989 | false |
cosenal/osf.io | website/archiver/model.py | 38 | 5944 | import datetime
from modularodm import fields
from framework.mongo import ObjectId
from framework.mongo import StoredObject
from website.archiver import (
ARCHIVER_INITIATED,
ARCHIVER_SUCCESS,
ARCHIVER_FAILURE,
ARCHIVER_FAILURE_STATUSES
)
from website.addons.base import StorageAddonBase
from website import settings
class ArchiveTarget(StoredObject):
"""Stores the results of archiving a single addon
"""
_id = fields.StringField(
primary=True,
default=lambda: str(ObjectId())
)
# addon_short_name of target addon
name = fields.StringField()
status = fields.StringField(default=ARCHIVER_INITIATED)
# <dict> representation of a website.archiver.AggregateStatResult
# Format: {
# 'target_id': <str>,
# 'target_name': <str>,
# 'targets': <list>(StatResult | AggregateStatResult),
# 'num_files': <int>,
# 'disk_usage': <float>,
# }
stat_result = fields.DictionaryField()
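    # Example shape (illustrative values only):
    # {
    #     'target_id': 'abc12',
    #     'target_name': 'osfstorage',
    #     'targets': [],
    #     'num_files': 3,
    #     'disk_usage': 1024.0,
    # }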
errors = fields.StringField(list=True)
def __repr__(self):
return '<{0}(_id={1}, name={2}, status={3})>'.format(
self.__class__.__name__,
self._id,
self.name,
self.status
)
class ArchiveJob(StoredObject):
_id = fields.StringField(
primary=True,
default=lambda: str(ObjectId())
)
# whether or not the ArchiveJob is complete (success or fail)
done = fields.BooleanField(default=False)
# whether or not emails have been sent for this ArchiveJob
sent = fields.BooleanField(default=False)
status = fields.StringField(default=ARCHIVER_INITIATED)
datetime_initiated = fields.DateTimeField(default=datetime.datetime.utcnow)
dst_node = fields.ForeignField('node', backref='active')
src_node = fields.ForeignField('node')
initiator = fields.ForeignField('user')
target_addons = fields.ForeignField('archivetarget', list=True)
def __repr__(self):
return (
'<{ClassName}(_id={self._id}, done={self.done}, '
' status={self.status}, src_node={self.src_node}, dst_node={self.dst_node})>'
).format(ClassName=self.__class__.__name__, self=self)
@property
def children(self):
return [node.archive_job for node in self.dst_node.nodes if node.primary]
@property
def parent(self):
parent_node = self.dst_node.parent_node
return parent_node.archive_job if parent_node else None
@property
def success(self):
return self.status == ARCHIVER_SUCCESS
@property
def pending(self):
        return any(
            target.status not in (ARCHIVER_SUCCESS, ARCHIVER_FAILURE)
            for target in self.target_addons
        )
def info(self):
return self.src_node, self.dst_node, self.initiator
def target_info(self):
return [
{
'name': target.name,
'status': target.status,
'stat_result': target.stat_result,
'errors': target.errors
}
for target in self.target_addons
]
    def archive_tree_finished(self):
        if self.pending:
            return False
        if not len(self.children):
            return True
        # Count of child subtrees that have finished archiving; truthy when
        # at least one child subtree is done.
        return len([
            child for child in self.children
            if child.archive_tree_finished()
        ])
def _fail_above(self):
"""Marks all ArchiveJob instances attached to Nodes above this as failed
"""
parent = self.parent
if parent:
parent.status = ARCHIVER_FAILURE
parent.save()
def _post_update_target(self):
"""Checks for success or failure if the ArchiveJob on self.dst_node
is finished
"""
if self.status == ARCHIVER_FAILURE:
return
if not self.pending:
self.done = True
            if any(target.status in ARCHIVER_FAILURE_STATUSES
                   for target in self.target_addons):
self.status = ARCHIVER_FAILURE
self._fail_above()
else:
self.status = ARCHIVER_SUCCESS
self.save()
def get_target(self, addon_short_name):
try:
return [addon for addon in self.target_addons if addon.name == addon_short_name][0]
except IndexError:
return None
def _set_target(self, addon_short_name):
if self.get_target(addon_short_name):
return
target = ArchiveTarget(name=addon_short_name)
target.save()
self.target_addons.append(target)
def set_targets(self):
addons = []
for addon in [self.src_node.get_addon(name)
for name in settings.ADDONS_ARCHIVABLE
if settings.ADDONS_ARCHIVABLE[name] != 'none']:
if not addon or not addon.complete or not isinstance(addon, StorageAddonBase):
continue
archive_errors = getattr(addon, 'archive_errors', None)
if not archive_errors or (archive_errors and not archive_errors()):
if addon.config.short_name == 'dataverse':
addons.append(addon.config.short_name + '-draft')
addons.append(addon.config.short_name + '-published')
else:
addons.append(addon.config.short_name)
for addon in addons:
self._set_target(addon)
self.save()
def update_target(self, addon_short_name, status, stat_result=None, errors=None):
stat_result = stat_result or {}
errors = errors or []
target = self.get_target(addon_short_name)
target.status = status
target.errors = errors
target.stat_result = stat_result
target.save()
self._post_update_target()
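
# Illustrative usage sketch (assumes existing src/dst Node objects and a
# User; the addon name below is an assumption, not part of this module):
#
#   job = ArchiveJob(src_node=src, dst_node=dst, initiator=user)
#   job.save()
#   job.set_targets()
#   job.update_target('osfstorage', ARCHIVER_SUCCESS, stat_result={})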
| apache-2.0 | 7,233,596,614,983,280,000 | -1,970,442,603,559,272,000 | 30.956989 | 115 | 0.587988 | false |
benssson/flatbuffers | tests/py_test.py | 16 | 49316 | # coding=utf-8
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import sys
PY_VERSION = sys.version_info[:2]
import ctypes
from collections import defaultdict
import timeit
import unittest
from flatbuffers import compat
from flatbuffers.compat import range_func as compat_range
import flatbuffers
from flatbuffers import number_types as N
import MyGame # refers to generated code
import MyGame.Example # refers to generated code
import MyGame.Example.Any # refers to generated code
import MyGame.Example.Color # refers to generated code
import MyGame.Example.Monster # refers to generated code
import MyGame.Example.Test # refers to generated code
import MyGame.Example.Stat # refers to generated code
import MyGame.Example.Vec3 # refers to generated code
def assertRaises(test_case, fn, exception_class):
''' Backwards-compatible assertion for exceptions raised. '''
exc = None
try:
fn()
except Exception as e:
exc = e
test_case.assertTrue(exc is not None)
test_case.assertTrue(isinstance(exc, exception_class))
class TestWireFormat(unittest.TestCase):
def test_wire_format(self):
# Verify that using the generated Python code builds a buffer without
# returning errors, and is interpreted correctly:
gen_buf, gen_off = make_monster_from_generated_code()
CheckReadBuffer(gen_buf, gen_off)
# Verify that the canonical flatbuffer file is readable by the
# generated Python code. Note that context managers are not part of
# Python 2.5, so we use the simpler open/close methods here:
f = open('monsterdata_test.mon', 'rb')
canonicalWireData = f.read()
f.close()
CheckReadBuffer(bytearray(canonicalWireData), 0)
# Write the generated buffer out to a file:
f = open('monsterdata_python_wire.mon', 'wb')
f.write(gen_buf[gen_off:])
f.close()
def CheckReadBuffer(buf, offset):
''' CheckReadBuffer checks that the given buffer is evaluated correctly
as the example Monster. '''
def asserter(stmt):
''' An assertion helper that is separated from TestCase classes. '''
if not stmt:
raise AssertionError('CheckReadBuffer case failed')
monster = MyGame.Example.Monster.Monster.GetRootAsMonster(buf, offset)
asserter(monster.Hp() == 80)
asserter(monster.Mana() == 150)
asserter(monster.Name() == b'MyMonster')
# initialize a Vec3 from Pos()
vec = monster.Pos()
asserter(vec is not None)
# verify the properties of the Vec3
asserter(vec.X() == 1.0)
asserter(vec.Y() == 2.0)
asserter(vec.Z() == 3.0)
asserter(vec.Test1() == 3.0)
asserter(vec.Test2() == 2)
# initialize a Test from Test3(...)
t = MyGame.Example.Test.Test()
t = vec.Test3(t)
asserter(t is not None)
# verify the properties of the Test
asserter(t.A() == 5)
asserter(t.B() == 6)
# verify that the enum code matches the enum declaration:
union_type = MyGame.Example.Any.Any
asserter(monster.TestType() == union_type.Monster)
# initialize a Table from a union field Test(...)
table2 = monster.Test()
asserter(type(table2) is flatbuffers.table.Table)
# initialize a Monster from the Table from the union
monster2 = MyGame.Example.Monster.Monster()
monster2.Init(table2.Bytes, table2.Pos)
asserter(monster2.Name() == b"Fred")
# iterate through the first monster's inventory:
asserter(monster.InventoryLength() == 5)
invsum = 0
for i in compat_range(monster.InventoryLength()):
v = monster.Inventory(i)
invsum += int(v)
asserter(invsum == 10)
asserter(monster.Test4Length() == 2)
# create a 'Test' object and populate it:
test0 = monster.Test4(0)
asserter(type(test0) is MyGame.Example.Test.Test)
test1 = monster.Test4(1)
asserter(type(test1) is MyGame.Example.Test.Test)
# the position of test0 and test1 are swapped in monsterdata_java_wire
# and monsterdata_test_wire, so ignore ordering
v0 = test0.A()
v1 = test0.B()
v2 = test1.A()
v3 = test1.B()
sumtest12 = int(v0) + int(v1) + int(v2) + int(v3)
asserter(sumtest12 == 100)
asserter(monster.TestarrayofstringLength() == 2)
asserter(monster.Testarrayofstring(0) == b"test1")
asserter(monster.Testarrayofstring(1) == b"test2")
asserter(monster.TestarrayoftablesLength() == 0)
asserter(monster.TestnestedflatbufferLength() == 0)
asserter(monster.Testempty() is None)
class TestFuzz(unittest.TestCase):
''' Low level stress/fuzz test: serialize/deserialize a variety of
different kinds of data in different combinations '''
binary_type = compat.binary_types[0] # this will always exist
ofInt32Bytes = binary_type([0x83, 0x33, 0x33, 0x33])
ofInt64Bytes = binary_type([0x84, 0x44, 0x44, 0x44,
0x44, 0x44, 0x44, 0x44])
overflowingInt32Val = flatbuffers.encode.Get(flatbuffers.packer.int32,
ofInt32Bytes, 0)
overflowingInt64Val = flatbuffers.encode.Get(flatbuffers.packer.int64,
ofInt64Bytes, 0)
# Values we're testing against: chosen to ensure no bits get chopped
    # off anywhere, and also be different from each other.
boolVal = True
int8Val = N.Int8Flags.py_type(-127) # 0x81
uint8Val = N.Uint8Flags.py_type(0xFF)
int16Val = N.Int16Flags.py_type(-32222) # 0x8222
uint16Val = N.Uint16Flags.py_type(0xFEEE)
int32Val = N.Int32Flags.py_type(overflowingInt32Val)
uint32Val = N.Uint32Flags.py_type(0xFDDDDDDD)
int64Val = N.Int64Flags.py_type(overflowingInt64Val)
uint64Val = N.Uint64Flags.py_type(0xFCCCCCCCCCCCCCCC)
# Python uses doubles, so force it here
float32Val = N.Float32Flags.py_type(ctypes.c_float(3.14159).value)
float64Val = N.Float64Flags.py_type(3.14159265359)
def test_fuzz(self):
return self.check_once(11, 100)
def check_once(self, fuzzFields, fuzzObjects):
testValuesMax = 11 # hardcoded to the number of scalar types
builder = flatbuffers.Builder(0)
l = LCG()
objects = [0 for _ in compat_range(fuzzObjects)]
# Generate fuzzObjects random objects each consisting of
# fuzzFields fields, each of a random type.
for i in compat_range(fuzzObjects):
builder.StartObject(fuzzFields)
for j in compat_range(fuzzFields):
choice = int(l.Next()) % testValuesMax
if choice == 0:
builder.PrependBoolSlot(int(j), self.boolVal, False)
elif choice == 1:
builder.PrependInt8Slot(int(j), self.int8Val, 0)
elif choice == 2:
builder.PrependUint8Slot(int(j), self.uint8Val, 0)
elif choice == 3:
builder.PrependInt16Slot(int(j), self.int16Val, 0)
elif choice == 4:
builder.PrependUint16Slot(int(j), self.uint16Val, 0)
elif choice == 5:
builder.PrependInt32Slot(int(j), self.int32Val, 0)
elif choice == 6:
builder.PrependUint32Slot(int(j), self.uint32Val, 0)
elif choice == 7:
builder.PrependInt64Slot(int(j), self.int64Val, 0)
elif choice == 8:
builder.PrependUint64Slot(int(j), self.uint64Val, 0)
elif choice == 9:
builder.PrependFloat32Slot(int(j), self.float32Val, 0)
elif choice == 10:
builder.PrependFloat64Slot(int(j), self.float64Val, 0)
else:
raise RuntimeError('unreachable')
off = builder.EndObject()
# store the offset from the end of the builder buffer,
# since it will keep growing:
objects[i] = off
# Do some bookkeeping to generate stats on fuzzes:
stats = defaultdict(int)
def check(table, desc, want, got):
stats[desc] += 1
self.assertEqual(want, got, "%s != %s, %s" % (want, got, desc))
l = LCG() # Reset.
# Test that all objects we generated are readable and return the
# expected values. We generate random objects in the same order
# so this is deterministic.
for i in compat_range(fuzzObjects):
table = flatbuffers.table.Table(builder.Bytes,
len(builder.Bytes) - objects[i])
for j in compat_range(fuzzFields):
field_count = flatbuffers.builder.VtableMetadataFields + j
f = N.VOffsetTFlags.py_type(field_count *
N.VOffsetTFlags.bytewidth)
choice = int(l.Next()) % testValuesMax
if choice == 0:
check(table, "bool", self.boolVal,
table.GetSlot(f, False, N.BoolFlags))
elif choice == 1:
check(table, "int8", self.int8Val,
table.GetSlot(f, 0, N.Int8Flags))
elif choice == 2:
check(table, "uint8", self.uint8Val,
table.GetSlot(f, 0, N.Uint8Flags))
elif choice == 3:
check(table, "int16", self.int16Val,
table.GetSlot(f, 0, N.Int16Flags))
elif choice == 4:
check(table, "uint16", self.uint16Val,
table.GetSlot(f, 0, N.Uint16Flags))
elif choice == 5:
check(table, "int32", self.int32Val,
table.GetSlot(f, 0, N.Int32Flags))
elif choice == 6:
check(table, "uint32", self.uint32Val,
table.GetSlot(f, 0, N.Uint32Flags))
elif choice == 7:
check(table, "int64", self.int64Val,
table.GetSlot(f, 0, N.Int64Flags))
elif choice == 8:
check(table, "uint64", self.uint64Val,
table.GetSlot(f, 0, N.Uint64Flags))
elif choice == 9:
check(table, "float32", self.float32Val,
table.GetSlot(f, 0, N.Float32Flags))
elif choice == 10:
check(table, "float64", self.float64Val,
table.GetSlot(f, 0, N.Float64Flags))
else:
raise RuntimeError('unreachable')
# If enough checks were made, verify that all scalar types were used:
self.assertEqual(testValuesMax, len(stats),
"fuzzing failed to test all scalar types: %s" % stats)
class TestByteLayout(unittest.TestCase):
''' TestByteLayout checks the bytes of a Builder in various scenarios. '''
def assertBuilderEquals(self, builder, want_chars_or_ints):
def integerize(x):
if isinstance(x, compat.string_types):
return ord(x)
return x
want_ints = list(map(integerize, want_chars_or_ints))
want = bytearray(want_ints)
got = builder.Bytes[builder.Head():] # use the buffer directly
self.assertEqual(want, got)
def test_numbers(self):
b = flatbuffers.Builder(0)
self.assertBuilderEquals(b, [])
b.PrependBool(True)
self.assertBuilderEquals(b, [1])
b.PrependInt8(-127)
self.assertBuilderEquals(b, [129, 1])
b.PrependUint8(255)
self.assertBuilderEquals(b, [255, 129, 1])
b.PrependInt16(-32222)
self.assertBuilderEquals(b, [0x22, 0x82, 0, 255, 129, 1]) # first pad
b.PrependUint16(0xFEEE)
# no pad this time:
self.assertBuilderEquals(b, [0xEE, 0xFE, 0x22, 0x82, 0, 255, 129, 1])
b.PrependInt32(-53687092)
self.assertBuilderEquals(b, [204, 204, 204, 252, 0xEE, 0xFE,
0x22, 0x82, 0, 255, 129, 1])
b.PrependUint32(0x98765432)
self.assertBuilderEquals(b, [0x32, 0x54, 0x76, 0x98,
204, 204, 204, 252,
0xEE, 0xFE, 0x22, 0x82,
0, 255, 129, 1])
def test_numbers64(self):
b = flatbuffers.Builder(0)
b.PrependUint64(0x1122334455667788)
self.assertBuilderEquals(b, [0x88, 0x77, 0x66, 0x55,
0x44, 0x33, 0x22, 0x11])
b = flatbuffers.Builder(0)
b.PrependInt64(0x1122334455667788)
self.assertBuilderEquals(b, [0x88, 0x77, 0x66, 0x55,
0x44, 0x33, 0x22, 0x11])
def test_1xbyte_vector(self):
b = flatbuffers.Builder(0)
self.assertBuilderEquals(b, [])
b.StartVector(flatbuffers.number_types.Uint8Flags.bytewidth, 1, 1)
self.assertBuilderEquals(b, [0, 0, 0]) # align to 4bytes
b.PrependByte(1)
self.assertBuilderEquals(b, [1, 0, 0, 0])
b.EndVector(1)
self.assertBuilderEquals(b, [1, 0, 0, 0, 1, 0, 0, 0]) # padding
def test_2xbyte_vector(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Uint8Flags.bytewidth, 2, 1)
self.assertBuilderEquals(b, [0, 0]) # align to 4bytes
b.PrependByte(1)
self.assertBuilderEquals(b, [1, 0, 0])
b.PrependByte(2)
self.assertBuilderEquals(b, [2, 1, 0, 0])
b.EndVector(2)
self.assertBuilderEquals(b, [2, 0, 0, 0, 2, 1, 0, 0]) # padding
def test_1xuint16_vector(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Uint16Flags.bytewidth, 1, 1)
self.assertBuilderEquals(b, [0, 0]) # align to 4bytes
b.PrependUint16(1)
self.assertBuilderEquals(b, [1, 0, 0, 0])
b.EndVector(1)
self.assertBuilderEquals(b, [1, 0, 0, 0, 1, 0, 0, 0]) # padding
def test_2xuint16_vector(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Uint16Flags.bytewidth, 2, 1)
self.assertBuilderEquals(b, []) # align to 4bytes
b.PrependUint16(0xABCD)
self.assertBuilderEquals(b, [0xCD, 0xAB])
b.PrependUint16(0xDCBA)
self.assertBuilderEquals(b, [0xBA, 0xDC, 0xCD, 0xAB])
b.EndVector(2)
self.assertBuilderEquals(b, [2, 0, 0, 0, 0xBA, 0xDC, 0xCD, 0xAB])
def test_create_ascii_string(self):
b = flatbuffers.Builder(0)
b.CreateString(u"foo", encoding='ascii')
# 0-terminated, no pad:
self.assertBuilderEquals(b, [3, 0, 0, 0, 'f', 'o', 'o', 0])
b.CreateString(u"moop", encoding='ascii')
# 0-terminated, 3-byte pad:
self.assertBuilderEquals(b, [4, 0, 0, 0, 'm', 'o', 'o', 'p',
0, 0, 0, 0,
3, 0, 0, 0, 'f', 'o', 'o', 0])
def test_create_utf8_string(self):
b = flatbuffers.Builder(0)
b.CreateString(u"Цлїςσδε")
self.assertBuilderEquals(b, "\x0e\x00\x00\x00\xd0\xa6\xd0\xbb\xd1\x97" \
"\xcf\x82\xcf\x83\xce\xb4\xce\xb5\x00\x00")
b.CreateString(u"フムアムカモケモ")
self.assertBuilderEquals(b, "\x18\x00\x00\x00\xef\xbe\x8c\xef\xbe\x91" \
"\xef\xbd\xb1\xef\xbe\x91\xef\xbd\xb6\xef\xbe\x93\xef\xbd\xb9\xef" \
"\xbe\x93\x00\x00\x00\x00\x0e\x00\x00\x00\xd0\xa6\xd0\xbb\xd1\x97" \
"\xcf\x82\xcf\x83\xce\xb4\xce\xb5\x00\x00")
def test_create_arbitrary_string(self):
b = flatbuffers.Builder(0)
s = "\x01\x02\x03"
b.CreateString(s) # Default encoding is utf-8.
# 0-terminated, no pad:
self.assertBuilderEquals(b, [3, 0, 0, 0, 1, 2, 3, 0])
s2 = "\x04\x05\x06\x07"
b.CreateString(s2) # Default encoding is utf-8.
# 0-terminated, 3-byte pad:
self.assertBuilderEquals(b, [4, 0, 0, 0, 4, 5, 6, 7, 0, 0, 0, 0,
3, 0, 0, 0, 1, 2, 3, 0])
def test_empty_vtable(self):
b = flatbuffers.Builder(0)
b.StartObject(0)
self.assertBuilderEquals(b, [])
b.EndObject()
self.assertBuilderEquals(b, [4, 0, 4, 0, 4, 0, 0, 0])
def test_vtable_with_one_true_bool(self):
b = flatbuffers.Builder(0)
self.assertBuilderEquals(b, [])
b.StartObject(1)
self.assertBuilderEquals(b, [])
b.PrependBoolSlot(0, True, False)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
8, 0, # length of object including vtable offset
7, 0, # start of bool value
6, 0, 0, 0, # offset for start of vtable (int32)
0, 0, 0, # padded to 4 bytes
1, # bool value
])
def test_vtable_with_one_default_bool(self):
b = flatbuffers.Builder(0)
self.assertBuilderEquals(b, [])
b.StartObject(1)
self.assertBuilderEquals(b, [])
b.PrependBoolSlot(0, False, False)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
4, 0, # end of object from here
0, 0, # entry 1 is zero
6, 0, 0, 0, # offset for start of vtable (int32)
])
def test_vtable_with_one_int16(self):
b = flatbuffers.Builder(0)
b.StartObject(1)
b.PrependInt16Slot(0, 0x789A, 0)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
8, 0, # end of object from here
6, 0, # offset to value
6, 0, 0, 0, # offset for start of vtable (int32)
0, 0, # padding to 4 bytes
0x9A, 0x78,
])
def test_vtable_with_two_int16(self):
b = flatbuffers.Builder(0)
b.StartObject(2)
b.PrependInt16Slot(0, 0x3456, 0)
b.PrependInt16Slot(1, 0x789A, 0)
b.EndObject()
self.assertBuilderEquals(b, [
8, 0, # vtable bytes
8, 0, # end of object from here
6, 0, # offset to value 0
4, 0, # offset to value 1
8, 0, 0, 0, # offset for start of vtable (int32)
0x9A, 0x78, # value 1
0x56, 0x34, # value 0
])
def test_vtable_with_int16_and_bool(self):
b = flatbuffers.Builder(0)
b.StartObject(2)
b.PrependInt16Slot(0, 0x3456, 0)
b.PrependBoolSlot(1, True, False)
b.EndObject()
self.assertBuilderEquals(b, [
8, 0, # vtable bytes
8, 0, # end of object from here
6, 0, # offset to value 0
5, 0, # offset to value 1
8, 0, 0, 0, # offset for start of vtable (int32)
0, # padding
1, # value 1
0x56, 0x34, # value 0
])
def test_vtable_with_empty_vector(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Uint8Flags.bytewidth, 0, 1)
vecend = b.EndVector(0)
b.StartObject(1)
b.PrependUOffsetTRelativeSlot(0, vecend, 0)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
8, 0,
4, 0, # offset to vector offset
6, 0, 0, 0, # offset for start of vtable (int32)
4, 0, 0, 0,
0, 0, 0, 0, # length of vector (not in struct)
])
def test_vtable_with_empty_vector_of_byte_and_some_scalars(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Uint8Flags.bytewidth, 0, 1)
vecend = b.EndVector(0)
b.StartObject(2)
b.PrependInt16Slot(0, 55, 0)
b.PrependUOffsetTRelativeSlot(1, vecend, 0)
b.EndObject()
self.assertBuilderEquals(b, [
8, 0, # vtable bytes
12, 0,
10, 0, # offset to value 0
4, 0, # offset to vector offset
8, 0, 0, 0, # vtable loc
8, 0, 0, 0, # value 1
0, 0, 55, 0, # value 0
0, 0, 0, 0, # length of vector (not in struct)
])
def test_vtable_with_1_int16_and_2vector_of_int16(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Int16Flags.bytewidth, 2, 1)
b.PrependInt16(0x1234)
b.PrependInt16(0x5678)
vecend = b.EndVector(2)
b.StartObject(2)
b.PrependUOffsetTRelativeSlot(1, vecend, 0)
b.PrependInt16Slot(0, 55, 0)
b.EndObject()
self.assertBuilderEquals(b, [
8, 0, # vtable bytes
12, 0, # length of object
6, 0, # start of value 0 from end of vtable
8, 0, # start of value 1 from end of buffer
8, 0, 0, 0, # offset for start of vtable (int32)
0, 0, # padding
55, 0, # value 0
4, 0, 0, 0, # vector position from here
2, 0, 0, 0, # length of vector (uint32)
0x78, 0x56, # vector value 1
0x34, 0x12, # vector value 0
])
def test_vtable_with_1_struct_of_1_int8__1_int16__1_int32(self):
b = flatbuffers.Builder(0)
b.StartObject(1)
b.Prep(4+4+4, 0)
b.PrependInt8(55)
b.Pad(3)
b.PrependInt16(0x1234)
b.Pad(2)
b.PrependInt32(0x12345678)
structStart = b.Offset()
b.PrependStructSlot(0, structStart, 0)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
16, 0, # end of object from here
4, 0, # start of struct from here
6, 0, 0, 0, # offset for start of vtable (int32)
0x78, 0x56, 0x34, 0x12, # value 2
0, 0, # padding
0x34, 0x12, # value 1
0, 0, 0, # padding
55, # value 0
])
def test_vtable_with_1_vector_of_2_struct_of_2_int8(self):
b = flatbuffers.Builder(0)
b.StartVector(flatbuffers.number_types.Int8Flags.bytewidth*2, 2, 1)
b.PrependInt8(33)
b.PrependInt8(44)
b.PrependInt8(55)
b.PrependInt8(66)
vecend = b.EndVector(2)
b.StartObject(1)
b.PrependUOffsetTRelativeSlot(0, vecend, 0)
b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
8, 0,
4, 0, # offset of vector offset
6, 0, 0, 0, # offset for start of vtable (int32)
4, 0, 0, 0, # vector start offset
2, 0, 0, 0, # vector length
66, # vector value 1,1
55, # vector value 1,0
44, # vector value 0,1
33, # vector value 0,0
])
def test_table_with_some_elements(self):
b = flatbuffers.Builder(0)
b.StartObject(2)
b.PrependInt8Slot(0, 33, 0)
b.PrependInt16Slot(1, 66, 0)
off = b.EndObject()
b.Finish(off)
self.assertBuilderEquals(b, [
12, 0, 0, 0, # root of table: points to vtable offset
8, 0, # vtable bytes
8, 0, # end of object from here
7, 0, # start of value 0
4, 0, # start of value 1
8, 0, 0, 0, # offset for start of vtable (int32)
66, 0, # value 1
0, # padding
33, # value 0
])
def test__one_unfinished_table_and_one_finished_table(self):
b = flatbuffers.Builder(0)
b.StartObject(2)
b.PrependInt8Slot(0, 33, 0)
b.PrependInt8Slot(1, 44, 0)
off = b.EndObject()
b.Finish(off)
b.StartObject(3)
b.PrependInt8Slot(0, 55, 0)
b.PrependInt8Slot(1, 66, 0)
b.PrependInt8Slot(2, 77, 0)
off = b.EndObject()
b.Finish(off)
self.assertBuilderEquals(b, [
16, 0, 0, 0, # root of table: points to object
0, 0, # padding
10, 0, # vtable bytes
8, 0, # size of object
7, 0, # start of value 0
6, 0, # start of value 1
5, 0, # start of value 2
10, 0, 0, 0, # offset for start of vtable (int32)
0, # padding
77, # value 2
66, # value 1
55, # value 0
12, 0, 0, 0, # root of table: points to object
8, 0, # vtable bytes
8, 0, # size of object
7, 0, # start of value 0
6, 0, # start of value 1
8, 0, 0, 0, # offset for start of vtable (int32)
0, 0, # padding
44, # value 1
33, # value 0
])
def test_a_bunch_of_bools(self):
b = flatbuffers.Builder(0)
b.StartObject(8)
b.PrependBoolSlot(0, True, False)
b.PrependBoolSlot(1, True, False)
b.PrependBoolSlot(2, True, False)
b.PrependBoolSlot(3, True, False)
b.PrependBoolSlot(4, True, False)
b.PrependBoolSlot(5, True, False)
b.PrependBoolSlot(6, True, False)
b.PrependBoolSlot(7, True, False)
off = b.EndObject()
b.Finish(off)
self.assertBuilderEquals(b, [
24, 0, 0, 0, # root of table: points to vtable offset
20, 0, # vtable bytes
12, 0, # size of object
11, 0, # start of value 0
10, 0, # start of value 1
9, 0, # start of value 2
8, 0, # start of value 3
7, 0, # start of value 4
6, 0, # start of value 5
5, 0, # start of value 6
4, 0, # start of value 7
20, 0, 0, 0, # vtable offset
1, # value 7
1, # value 6
1, # value 5
1, # value 4
1, # value 3
1, # value 2
1, # value 1
1, # value 0
])
def test_three_bools(self):
b = flatbuffers.Builder(0)
b.StartObject(3)
b.PrependBoolSlot(0, True, False)
b.PrependBoolSlot(1, True, False)
b.PrependBoolSlot(2, True, False)
off = b.EndObject()
b.Finish(off)
self.assertBuilderEquals(b, [
16, 0, 0, 0, # root of table: points to vtable offset
0, 0, # padding
10, 0, # vtable bytes
8, 0, # size of object
7, 0, # start of value 0
6, 0, # start of value 1
5, 0, # start of value 2
10, 0, 0, 0, # vtable offset from here
0, # padding
1, # value 2
1, # value 1
1, # value 0
])
def test_some_floats(self):
b = flatbuffers.Builder(0)
b.StartObject(1)
b.PrependFloat32Slot(0, 1.0, 0.0)
off = b.EndObject()
self.assertBuilderEquals(b, [
6, 0, # vtable bytes
8, 0, # size of object
4, 0, # start of value 0
6, 0, 0, 0, # vtable offset
0, 0, 128, 63, # value 0
])
def make_monster_from_generated_code():
''' Use generated code to build the example Monster. '''
b = flatbuffers.Builder(0)
string = b.CreateString("MyMonster")
test1 = b.CreateString("test1")
test2 = b.CreateString("test2")
fred = b.CreateString("Fred")
MyGame.Example.Monster.MonsterStartInventoryVector(b, 5)
b.PrependByte(4)
b.PrependByte(3)
b.PrependByte(2)
b.PrependByte(1)
b.PrependByte(0)
inv = b.EndVector(5)
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddName(b, fred)
mon2 = MyGame.Example.Monster.MonsterEnd(b)
MyGame.Example.Monster.MonsterStartTest4Vector(b, 2)
MyGame.Example.Test.CreateTest(b, 10, 20)
MyGame.Example.Test.CreateTest(b, 30, 40)
test4 = b.EndVector(2)
MyGame.Example.Monster.MonsterStartTestarrayofstringVector(b, 2)
b.PrependUOffsetTRelative(test2)
b.PrependUOffsetTRelative(test1)
testArrayOfString = b.EndVector(2)
MyGame.Example.Monster.MonsterStart(b)
pos = MyGame.Example.Vec3.CreateVec3(b, 1.0, 2.0, 3.0, 3.0, 2, 5, 6)
MyGame.Example.Monster.MonsterAddPos(b, pos)
MyGame.Example.Monster.MonsterAddHp(b, 80)
MyGame.Example.Monster.MonsterAddName(b, string)
MyGame.Example.Monster.MonsterAddInventory(b, inv)
MyGame.Example.Monster.MonsterAddTestType(b, 1)
MyGame.Example.Monster.MonsterAddTest(b, mon2)
MyGame.Example.Monster.MonsterAddTest4(b, test4)
MyGame.Example.Monster.MonsterAddTestarrayofstring(b, testArrayOfString)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
return b.Bytes, b.Head()
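
# Usage sketch (mirrors TestWireFormat above): the returned (bytes, offset)
# pair feeds straight into the generated accessor.
#
#   buf, off = make_monster_from_generated_code()
#   mon = MyGame.Example.Monster.Monster.GetRootAsMonster(buf, off)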
class TestAllCodePathsOfExampleSchema(unittest.TestCase):
def setUp(self, *args, **kwargs):
super(TestAllCodePathsOfExampleSchema, self).setUp(*args, **kwargs)
b = flatbuffers.Builder(0)
MyGame.Example.Monster.MonsterStart(b)
gen_mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(gen_mon)
self.mon = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
def test_default_monster_pos(self):
self.assertTrue(self.mon.Pos() is None)
def test_nondefault_monster_mana(self):
b = flatbuffers.Builder(0)
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddMana(b, 50)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
got_mon = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(50, got_mon.Mana())
def test_default_monster_hp(self):
self.assertEqual(100, self.mon.Hp())
def test_default_monster_name(self):
self.assertEqual('', self.mon.Name())
def test_default_monster_inventory_item(self):
self.assertEqual(0, self.mon.Inventory(0))
def test_default_monster_inventory_length(self):
self.assertEqual(0, self.mon.InventoryLength())
def test_default_monster_color(self):
self.assertEqual(MyGame.Example.Color.Color.Blue, self.mon.Color())
def test_nondefault_monster_color(self):
b = flatbuffers.Builder(0)
color = MyGame.Example.Color.Color.Red
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddColor(b, color)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(MyGame.Example.Color.Color.Red, mon2.Color())
def test_default_monster_testtype(self):
self.assertEqual(0, self.mon.TestType())
def test_default_monster_test_field(self):
self.assertEqual(None, self.mon.Test())
def test_default_monster_test4_item(self):
self.assertEqual(None, self.mon.Test4(0))
def test_default_monster_test4_length(self):
self.assertEqual(0, self.mon.Test4Length())
def test_default_monster_testarrayofstring(self):
self.assertEqual("", self.mon.Testarrayofstring(0))
def test_default_monster_testarrayofstring_length(self):
self.assertEqual(0, self.mon.TestarrayofstringLength())
def test_default_monster_testarrayoftables(self):
self.assertEqual(None, self.mon.Testarrayoftables(0))
def test_nondefault_monster_testarrayoftables(self):
b = flatbuffers.Builder(0)
# make a child Monster within a vector of Monsters:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddHp(b, 99)
sub_monster = MyGame.Example.Monster.MonsterEnd(b)
# build the vector:
MyGame.Example.Monster.MonsterStartTestarrayoftablesVector(b, 1)
b.PrependUOffsetTRelative(sub_monster)
vec = b.EndVector(1)
# make the parent monster and include the vector of Monster:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddTestarrayoftables(b, vec)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Output(), 0)
self.assertEqual(99, mon2.Testarrayoftables(0).Hp())
self.assertEqual(1, mon2.TestarrayoftablesLength())
def test_default_monster_testarrayoftables_length(self):
self.assertEqual(0, self.mon.TestarrayoftablesLength())
def test_nondefault_monster_enemy(self):
b = flatbuffers.Builder(0)
# make an Enemy object:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddHp(b, 88)
enemy = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(enemy)
# make the parent monster and include the vector of Monster:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddEnemy(b, enemy)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(88, mon2.Enemy().Hp())
def test_default_monster_testnestedflatbuffer(self):
self.assertEqual(0, self.mon.Testnestedflatbuffer(0))
def test_default_monster_testnestedflatbuffer_length(self):
self.assertEqual(0, self.mon.TestnestedflatbufferLength())
def test_nondefault_monster_testnestedflatbuffer(self):
b = flatbuffers.Builder(0)
MyGame.Example.Monster.MonsterStartTestnestedflatbufferVector(b, 3)
b.PrependByte(4)
b.PrependByte(2)
b.PrependByte(0)
sub_buf = b.EndVector(3)
# make the parent monster and include the vector of Monster:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddTestnestedflatbuffer(b, sub_buf)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(3, mon2.TestnestedflatbufferLength())
self.assertEqual(0, mon2.Testnestedflatbuffer(0))
self.assertEqual(2, mon2.Testnestedflatbuffer(1))
self.assertEqual(4, mon2.Testnestedflatbuffer(2))
def test_nondefault_monster_testempty(self):
b = flatbuffers.Builder(0)
# make a Stat object:
MyGame.Example.Stat.StatStart(b)
MyGame.Example.Stat.StatAddVal(b, 123)
my_stat = MyGame.Example.Stat.StatEnd(b)
b.Finish(my_stat)
# include the stat object in a monster:
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddTestempty(b, my_stat)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(123, mon2.Testempty().Val())
def test_default_monster_testbool(self):
self.assertFalse(self.mon.Testbool())
def test_nondefault_monster_testbool(self):
b = flatbuffers.Builder(0)
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddTestbool(b, True)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertTrue(mon2.Testbool())
def test_default_monster_testhashes(self):
self.assertEqual(0, self.mon.Testhashs32Fnv1())
self.assertEqual(0, self.mon.Testhashu32Fnv1())
self.assertEqual(0, self.mon.Testhashs64Fnv1())
self.assertEqual(0, self.mon.Testhashu64Fnv1())
self.assertEqual(0, self.mon.Testhashs32Fnv1a())
self.assertEqual(0, self.mon.Testhashu32Fnv1a())
self.assertEqual(0, self.mon.Testhashs64Fnv1a())
self.assertEqual(0, self.mon.Testhashu64Fnv1a())
def test_nondefault_monster_testhashes(self):
b = flatbuffers.Builder(0)
MyGame.Example.Monster.MonsterStart(b)
MyGame.Example.Monster.MonsterAddTesthashs32Fnv1(b, 1)
MyGame.Example.Monster.MonsterAddTesthashu32Fnv1(b, 2)
MyGame.Example.Monster.MonsterAddTesthashs64Fnv1(b, 3)
MyGame.Example.Monster.MonsterAddTesthashu64Fnv1(b, 4)
MyGame.Example.Monster.MonsterAddTesthashs32Fnv1a(b, 5)
MyGame.Example.Monster.MonsterAddTesthashu32Fnv1a(b, 6)
MyGame.Example.Monster.MonsterAddTesthashs64Fnv1a(b, 7)
MyGame.Example.Monster.MonsterAddTesthashu64Fnv1a(b, 8)
mon = MyGame.Example.Monster.MonsterEnd(b)
b.Finish(mon)
# inspect the resulting data:
mon2 = MyGame.Example.Monster.Monster.GetRootAsMonster(b.Bytes,
b.Head())
self.assertEqual(1, mon2.Testhashs32Fnv1())
self.assertEqual(2, mon2.Testhashu32Fnv1())
self.assertEqual(3, mon2.Testhashs64Fnv1())
self.assertEqual(4, mon2.Testhashu64Fnv1())
self.assertEqual(5, mon2.Testhashs32Fnv1a())
self.assertEqual(6, mon2.Testhashu32Fnv1a())
self.assertEqual(7, mon2.Testhashs64Fnv1a())
self.assertEqual(8, mon2.Testhashu64Fnv1a())
def test_getrootas_for_nonroot_table(self):
b = flatbuffers.Builder(0)
string = b.CreateString("MyStat")
MyGame.Example.Stat.StatStart(b)
MyGame.Example.Stat.StatAddId(b, string)
MyGame.Example.Stat.StatAddVal(b, 12345678)
MyGame.Example.Stat.StatAddCount(b, 12345)
stat = MyGame.Example.Stat.StatEnd(b)
b.Finish(stat)
stat2 = MyGame.Example.Stat.Stat.GetRootAsStat(b.Bytes, b.Head())
self.assertEqual(b"MyStat", stat2.Id())
self.assertEqual(12345678, stat2.Val())
self.assertEqual(12345, stat2.Count())
class TestVtableDeduplication(unittest.TestCase):
''' TestVtableDeduplication verifies that vtables are deduplicated. '''
def test_vtable_deduplication(self):
b = flatbuffers.Builder(0)
b.StartObject(4)
b.PrependByteSlot(0, 0, 0)
b.PrependByteSlot(1, 11, 0)
b.PrependByteSlot(2, 22, 0)
b.PrependInt16Slot(3, 33, 0)
obj0 = b.EndObject()
b.StartObject(4)
b.PrependByteSlot(0, 0, 0)
b.PrependByteSlot(1, 44, 0)
b.PrependByteSlot(2, 55, 0)
b.PrependInt16Slot(3, 66, 0)
obj1 = b.EndObject()
b.StartObject(4)
b.PrependByteSlot(0, 0, 0)
b.PrependByteSlot(1, 77, 0)
b.PrependByteSlot(2, 88, 0)
b.PrependInt16Slot(3, 99, 0)
obj2 = b.EndObject()
got = b.Bytes[b.Head():]
want = bytearray([
240, 255, 255, 255, # == -12. offset to dedupped vtable.
99, 0,
88,
77,
248, 255, 255, 255, # == -8. offset to dedupped vtable.
66, 0,
55,
44,
12, 0,
8, 0,
0, 0,
7, 0,
6, 0,
4, 0,
12, 0, 0, 0,
33, 0,
22,
11,
])
self.assertEqual((len(want), want), (len(got), got))
table0 = flatbuffers.table.Table(b.Bytes, len(b.Bytes) - obj0)
table1 = flatbuffers.table.Table(b.Bytes, len(b.Bytes) - obj1)
table2 = flatbuffers.table.Table(b.Bytes, len(b.Bytes) - obj2)
def _checkTable(tab, voffsett_value, b, c, d):
# vtable size
got = tab.GetVOffsetTSlot(0, 0)
self.assertEqual(12, got, 'case 0, 0')
# object size
got = tab.GetVOffsetTSlot(2, 0)
self.assertEqual(8, got, 'case 2, 0')
# default value
got = tab.GetVOffsetTSlot(4, 0)
self.assertEqual(voffsett_value, got, 'case 4, 0')
got = tab.GetSlot(6, 0, N.Uint8Flags)
self.assertEqual(b, got, 'case 6, 0')
val = tab.GetSlot(8, 0, N.Uint8Flags)
self.assertEqual(c, val, 'failed 8, 0')
got = tab.GetSlot(10, 0, N.Uint8Flags)
self.assertEqual(d, got, 'failed 10, 0')
_checkTable(table0, 0, 11, 22, 33)
_checkTable(table1, 0, 44, 55, 66)
_checkTable(table2, 0, 77, 88, 99)
class TestExceptions(unittest.TestCase):
def test_object_is_nested_error(self):
b = flatbuffers.Builder(0)
b.StartObject(0)
assertRaises(self, lambda: b.StartObject(0),
flatbuffers.builder.IsNestedError)
def test_object_is_not_nested_error(self):
b = flatbuffers.Builder(0)
assertRaises(self, lambda: b.EndObject(),
flatbuffers.builder.IsNotNestedError)
def test_struct_is_not_inline_error(self):
b = flatbuffers.Builder(0)
b.StartObject(0)
assertRaises(self, lambda: b.PrependStructSlot(0, 1, 0),
flatbuffers.builder.StructIsNotInlineError)
def test_unreachable_error(self):
b = flatbuffers.Builder(0)
assertRaises(self, lambda: b.PrependUOffsetTRelative(1),
flatbuffers.builder.OffsetArithmeticError)
def test_create_string_is_nested_error(self):
b = flatbuffers.Builder(0)
b.StartObject(0)
s = 'test1'
assertRaises(self, lambda: b.CreateString(s),
flatbuffers.builder.IsNestedError)
def test_finished_bytes_error(self):
b = flatbuffers.Builder(0)
assertRaises(self, lambda: b.Output(),
flatbuffers.builder.BuilderNotFinishedError)
def CheckAgainstGoldDataGo():
try:
gen_buf, gen_off = make_monster_from_generated_code()
fn = 'monsterdata_go_wire.mon'
if not os.path.exists(fn):
print('Go-generated data does not exist, failed.')
return False
# would like to use a context manager here, but it's less
# backwards-compatible:
f = open(fn, 'rb')
go_wire_data = f.read()
f.close()
CheckReadBuffer(bytearray(go_wire_data), 0)
        if bytearray(gen_buf[gen_off:]) != bytearray(go_wire_data):
raise AssertionError('CheckAgainstGoldDataGo failed')
    except Exception:
print('Failed to test against Go-generated test data.')
return False
print('Can read Go-generated test data, and Python generates bytewise identical data.')
return True
def CheckAgainstGoldDataJava():
try:
gen_buf, gen_off = make_monster_from_generated_code()
fn = 'monsterdata_java_wire.mon'
if not os.path.exists(fn):
print('Java-generated data does not exist, failed.')
return False
f = open(fn, 'rb')
java_wire_data = f.read()
f.close()
CheckReadBuffer(bytearray(java_wire_data), 0)
    except Exception:
print('Failed to read Java-generated test data.')
return False
print('Can read Java-generated test data.')
return True
class LCG(object):
    ''' A simple random number generator, included to ensure results are the
    same across platforms.
    http://en.wikipedia.org/wiki/Park%E2%80%93Miller_random_number_generator '''
__slots__ = ['n']
InitialLCGSeed = 48271
def __init__(self):
self.n = self.InitialLCGSeed
def Reset(self):
self.n = self.InitialLCGSeed
def Next(self):
self.n = ((self.n * 279470273) % 4294967291) & 0xFFFFFFFF
return self.n
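# Minimal usage sketch (an assumed illustration, not part of the test suite):
# the sequence depends only on the constants above, so it is reproducible.
#
#     lcg = LCG()
#     first = [lcg.Next() for _ in range(4)]
#     lcg.Reset()
#     assert first == [lcg.Next() for _ in range(4)]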
def BenchmarkVtableDeduplication(count):
'''
BenchmarkVtableDeduplication measures the speed of vtable deduplication
by creating `prePop` vtables, then populating `count` objects with a
different single vtable.
When count is large (as in long benchmarks), memory usage may be high.
'''
prePop = 10
builder = flatbuffers.Builder(0)
# pre-populate some vtables:
for i in compat_range(prePop):
builder.StartObject(i)
for j in compat_range(i):
builder.PrependInt16Slot(j, j, 0)
builder.EndObject()
# benchmark deduplication of a new vtable:
def f():
builder.StartObject(prePop)
for j in compat_range(prePop):
builder.PrependInt16Slot(j, j, 0)
builder.EndObject()
duration = timeit.timeit(stmt=f, number=count)
rate = float(count) / duration
print(('vtable deduplication rate: %.2f/sec' % rate))
def BenchmarkCheckReadBuffer(count, buf, off):
'''
BenchmarkCheckReadBuffer measures the speed of flatbuffer reading
by re-using the CheckReadBuffer function with the gold data.
'''
def f():
CheckReadBuffer(buf, off)
duration = timeit.timeit(stmt=f, number=count)
rate = float(count) / duration
data = float(len(buf) * count) / float(1024 * 1024)
data_rate = data / float(duration)
print(('traversed %d %d-byte flatbuffers in %.2fsec: %.2f/sec, %.2fMB/sec')
% (count, len(buf), duration, rate, data_rate))
def BenchmarkMakeMonsterFromGeneratedCode(count, length):
'''
BenchmarkMakeMonsterFromGeneratedCode measures the speed of flatbuffer
creation by re-using the make_monster_from_generated_code function for
generating gold data examples.
'''
duration = timeit.timeit(stmt=make_monster_from_generated_code,
number=count)
rate = float(count) / duration
data = float(length * count) / float(1024 * 1024)
data_rate = data / float(duration)
print(('built %d %d-byte flatbuffers in %.2fsec: %.2f/sec, %.2fMB/sec' % \
(count, length, duration, rate, data_rate)))
def backward_compatible_run_tests(**kwargs):
if PY_VERSION < (2, 6):
        sys.stderr.write("Python versions older than 2.6 are not supported")
sys.stderr.flush()
return False
# python2.6 has a reduced-functionality unittest.main function:
if PY_VERSION == (2, 6):
try:
unittest.main(**kwargs)
except SystemExit as e:
if not e.code == 0:
return False
return True
# python2.7 and above let us not exit once unittest.main is run:
kwargs['exit'] = False
kwargs['verbosity'] = 0
ret = unittest.main(**kwargs)
if ret.result.errors or ret.result.failures:
return False
return True
def main():
import os
import sys
if not len(sys.argv) == 4:
        sys.stderr.write('Usage: %s <benchmark vtable count> '
                         '<benchmark read count> <benchmark build count>\n'
                         % sys.argv[0])
        sys.stderr.write(' Provide COMPARE_GENERATED_TO_GO=1 to check '
                         'for bytewise comparison to Go data.\n')
        sys.stderr.write(' Provide COMPARE_GENERATED_TO_JAVA=1 to check '
                         'for bytewise comparison to Java data.\n')
sys.stderr.flush()
sys.exit(1)
kwargs = dict(argv=sys.argv[:-3])
# run tests, and run some language comparison checks if needed:
success = backward_compatible_run_tests(**kwargs)
if success and os.environ.get('COMPARE_GENERATED_TO_GO', 0) == "1":
success = success and CheckAgainstGoldDataGo()
if success and os.environ.get('COMPARE_GENERATED_TO_JAVA', 0) == "1":
success = success and CheckAgainstGoldDataJava()
if not success:
sys.stderr.write('Tests failed, skipping benchmarks.\n')
sys.stderr.flush()
sys.exit(1)
# run benchmarks (if 0, they will be a noop):
bench_vtable = int(sys.argv[1])
bench_traverse = int(sys.argv[2])
bench_build = int(sys.argv[3])
if bench_vtable:
BenchmarkVtableDeduplication(bench_vtable)
if bench_traverse:
buf, off = make_monster_from_generated_code()
BenchmarkCheckReadBuffer(bench_traverse, buf, off)
if bench_build:
buf, off = make_monster_from_generated_code()
BenchmarkMakeMonsterFromGeneratedCode(bench_build, len(buf))
if __name__ == '__main__':
main()
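# Typical invocation (assumed; the script name is illustrative). Three numeric
# benchmark counts are required, and 0 makes a benchmark a no-op:
#
#     python py_test.py 0 0 0
#     COMPARE_GENERATED_TO_GO=1 python py_test.py 0 0 0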
| apache-2.0 | 2,515,326,665,952,259,600 | -1,227,027,401,716,778,000 | 34.954048 | 91 | 0.583937 | false |
olivierkes/manuskript | manuskript/ui/views/outlineDelegates.py | 2 | 13273 | #!/usr/bin/env python
# --!-- coding: utf8 --!--
from PyQt5.QtCore import Qt, QSize, QModelIndex
from PyQt5.QtGui import QColor, QPalette, QIcon, QFont, QFontMetrics, QBrush
from PyQt5.QtWidgets import QStyledItemDelegate, QStyleOptionViewItem, QStyle, QComboBox, QStyleOptionComboBox
from PyQt5.QtWidgets import qApp
from manuskript import settings
from manuskript.enums import Character, Outline
from manuskript.functions import outlineItemColors, mixColors, colorifyPixmap, toInt, toFloat, drawProgress
from manuskript.ui import style as S
class outlineTitleDelegate(QStyledItemDelegate):
def __init__(self, parent=None):
QStyledItemDelegate.__init__(self, parent)
self._view = None
def setView(self, view):
self._view = view
def paint(self, painter, option, index):
item = index.internalPointer()
colors = outlineItemColors(item)
style = qApp.style()
opt = QStyleOptionViewItem(option)
self.initStyleOption(opt, index)
iconRect = style.subElementRect(style.SE_ItemViewItemDecoration, opt)
textRect = style.subElementRect(style.SE_ItemViewItemText, opt)
# Background
style.drawPrimitive(style.PE_PanelItemViewItem, opt, painter)
if settings.viewSettings["Outline"]["Background"] != "Nothing" and not opt.state & QStyle.State_Selected:
col = colors[settings.viewSettings["Outline"]["Background"]]
if col != QColor(Qt.transparent):
col2 = QColor(S.base)
if opt.state & QStyle.State_Selected:
col2 = opt.palette.brush(QPalette.Normal, QPalette.Highlight).color()
col = mixColors(col, col2, .2)
painter.save()
painter.setBrush(col)
painter.setPen(Qt.NoPen)
rect = opt.rect
if self._view:
r2 = self._view.visualRect(index)
rect = self._view.viewport().rect()
rect.setLeft(r2.left())
rect.setTop(r2.top())
rect.setBottom(r2.bottom())
painter.drawRoundedRect(rect, 5, 5)
painter.restore()
# Icon
mode = QIcon.Normal
if not opt.state & QStyle.State_Enabled:
mode = QIcon.Disabled
elif opt.state & QStyle.State_Selected:
mode = QIcon.Selected
state = QIcon.On if opt.state & QStyle.State_Open else QIcon.Off
icon = opt.icon.pixmap(iconRect.size(), mode=mode, state=state)
if opt.icon and settings.viewSettings["Outline"]["Icon"] != "Nothing":
color = colors[settings.viewSettings["Outline"]["Icon"]]
colorifyPixmap(icon, color)
opt.icon = QIcon(icon)
opt.icon.paint(painter, iconRect, opt.decorationAlignment, mode, state)
# Text
if opt.text:
painter.save()
textColor = QColor(S.text)
if option.state & QStyle.State_Selected:
col = QColor(S.highlightedText)
textColor = col
painter.setPen(col)
if settings.viewSettings["Outline"]["Text"] != "Nothing":
col = colors[settings.viewSettings["Outline"]["Text"]]
if col == Qt.transparent:
col = textColor
# If text color is Compile and item is selected, we have
# to change the color
if settings.viewSettings["Outline"]["Text"] == "Compile" and \
item.compile() in [0, "0"]:
col = mixColors(textColor, QColor(S.window))
painter.setPen(col)
f = QFont(opt.font)
painter.setFont(f)
fm = QFontMetrics(f)
elidedText = fm.elidedText(opt.text, Qt.ElideRight, textRect.width())
painter.drawText(textRect, Qt.AlignLeft, elidedText)
painter.restore()
# QStyledItemDelegate.paint(self, painter, option, index)
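# Minimal wiring sketch (hypothetical; the real setup lives in the view code
# that instantiates this delegate):
#
#     delegate = outlineTitleDelegate(parent=view)
#     delegate.setView(view)
#     view.setItemDelegate(delegate)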
class outlineCharacterDelegate(QStyledItemDelegate):
def __init__(self, mdlCharacter, parent=None):
QStyledItemDelegate.__init__(self, parent)
self.mdlCharacter = mdlCharacter
def sizeHint(self, option, index):
# s = QStyledItemDelegate.sizeHint(self, option, index)
item = QModelIndex()
character = self.mdlCharacter.getCharacterByID(index.data())
if character:
item = character.index(Character.name)
opt = QStyleOptionViewItem(option)
self.initStyleOption(opt, item)
s = QStyledItemDelegate.sizeHint(self, opt, item)
if s.width() > 200:
s.setWidth(200)
elif s.width() < 100:
s.setWidth(100)
return s + QSize(18, 0)
def createEditor(self, parent, option, index):
item = index.internalPointer()
# if item.isFolder(): # No POV for folders
# return
editor = QComboBox(parent)
editor.setAutoFillBackground(True)
editor.setFrame(False)
return editor
def setEditorData(self, editor, index):
# editor.addItem("")
editor.addItem(QIcon.fromTheme("dialog-no"), self.tr("None"))
l = [self.tr("Main"), self.tr("Secondary"), self.tr("Minor")]
for importance in range(3):
editor.addItem(l[importance])
editor.setItemData(editor.count() - 1, QBrush(QColor(S.highlightedTextDark)), Qt.ForegroundRole)
editor.setItemData(editor.count() - 1, QBrush(QColor(S.highlightLight)), Qt.BackgroundRole)
item = editor.model().item(editor.count() - 1)
item.setFlags(Qt.ItemIsEnabled)
for i in range(self.mdlCharacter.rowCount()):
imp = toInt(self.mdlCharacter.importance(i))
                if 2 - imp != importance:
                    continue
# try:
editor.addItem(self.mdlCharacter.icon(i), self.mdlCharacter.name(i), self.mdlCharacter.ID(i))
editor.setItemData(editor.count() - 1, self.mdlCharacter.name(i), Qt.ToolTipRole)
# except:
# pass
editor.setCurrentIndex(editor.findData(index.data()))
editor.showPopup()
def setModelData(self, editor, model, index):
val = editor.currentData()
model.setData(index, val)
def paint(self, painter, option, index):
##option.rect.setWidth(option.rect.width() - 18)
# QStyledItemDelegate.paint(self, painter, option, index)
##option.rect.setWidth(option.rect.width() + 18)
itemIndex = QModelIndex()
character = self.mdlCharacter.getCharacterByID(index.data())
if character:
itemIndex = character.index(Character.name)
opt = QStyleOptionViewItem(option)
self.initStyleOption(opt, itemIndex)
qApp.style().drawControl(QStyle.CE_ItemViewItem, opt, painter)
# if index.isValid() and index.internalPointer().data(Outline.POV) not in ["", None]:
if itemIndex.isValid() and self.mdlCharacter.data(itemIndex) not in ["", None]:
opt = QStyleOptionComboBox()
opt.rect = option.rect
r = qApp.style().subControlRect(QStyle.CC_ComboBox, opt, QStyle.SC_ComboBoxArrow)
option.rect = r
qApp.style().drawPrimitive(QStyle.PE_IndicatorArrowDown, option, painter)
class outlineCompileDelegate(QStyledItemDelegate):
def __init__(self, parent=None):
QStyledItemDelegate.__init__(self, parent)
def displayText(self, value, locale):
return ""
#def createEditor(self, parent, option, index):
#return None
class outlineGoalPercentageDelegate(QStyledItemDelegate):
def __init__(self, rootIndex=None, parent=None):
QStyledItemDelegate.__init__(self, parent)
self.rootIndex = rootIndex
def sizeHint(self, option, index):
sh = QStyledItemDelegate.sizeHint(self, option, index)
# if sh.width() > 50:
sh.setWidth(100)
return sh
def paint(self, painter, option, index):
if not index.isValid():
return QStyledItemDelegate.paint(self, painter, option, index)
QStyledItemDelegate.paint(self, painter, option, index)
item = index.internalPointer()
if not item.data(Outline.goal):
return
p = toFloat(item.data(Outline.goalPercentage))
typ = item.data(Outline.type)
level = item.level()
if self.rootIndex and self.rootIndex.isValid():
level -= self.rootIndex.internalPointer().level() + 1
margin = 5
height = max(min(option.rect.height() - 2 * margin, 12) - 2 * level, 6)
painter.save()
rect = option.rect.adjusted(margin, margin, -margin, -margin)
# Move
rect.translate(level * rect.width() / 10, 0)
rect.setWidth(rect.width() - level * rect.width() / 10)
rect.setHeight(height)
rect.setTop(option.rect.top() + (option.rect.height() - height) / 2)
drawProgress(painter, rect, p) # from functions
painter.restore()
def displayText(self, value, locale):
return ""
class outlineStatusDelegate(QStyledItemDelegate):
def __init__(self, mdlStatus, parent=None):
QStyledItemDelegate.__init__(self, parent)
self.mdlStatus = mdlStatus
def sizeHint(self, option, index):
s = QStyledItemDelegate.sizeHint(self, option, index)
if s.width() > 150:
s.setWidth(150)
elif s.width() < 50:
s.setWidth(50)
return s + QSize(18, 0)
def createEditor(self, parent, option, index):
editor = QComboBox(parent)
editor.setAutoFillBackground(True)
editor.setFrame(False)
return editor
def setEditorData(self, editor, index):
for i in range(self.mdlStatus.rowCount()):
editor.addItem(self.mdlStatus.item(i, 0).text())
val = index.internalPointer().data(Outline.status)
if not val: val = 0
editor.setCurrentIndex(int(val))
editor.showPopup()
def setModelData(self, editor, model, index):
val = editor.currentIndex()
model.setData(index, val)
def displayText(self, value, locale):
try:
return self.mdlStatus.item(int(value), 0).text()
        except Exception:
return ""
def paint(self, painter, option, index):
QStyledItemDelegate.paint(self, painter, option, index)
if index.isValid() and index.internalPointer().data(Outline.status) not in ["", None, "0", 0]:
opt = QStyleOptionComboBox()
opt.rect = option.rect
r = qApp.style().subControlRect(QStyle.CC_ComboBox, opt, QStyle.SC_ComboBoxArrow)
option.rect = r
qApp.style().drawPrimitive(QStyle.PE_IndicatorArrowDown, option, painter)
class outlineLabelDelegate(QStyledItemDelegate):
def __init__(self, mdlLabels, parent=None):
QStyledItemDelegate.__init__(self, parent)
self.mdlLabels = mdlLabels
def sizeHint(self, option, index):
d = index.internalPointer().data(index.column(), Qt.DisplayRole)
if not d:
d = 0
item = self.mdlLabels.item(int(d), 0)
idx = self.mdlLabels.indexFromItem(item)
opt = QStyleOptionViewItem(option)
self.initStyleOption(opt, idx)
s = qApp.style().sizeFromContents(QStyle.CT_ItemViewItem, opt, QSize(), None)
if s.width() > 150:
s.setWidth(150)
elif s.width() < 50:
s.setWidth(50)
return s + QSize(18, 0)
def createEditor(self, parent, option, index):
item = index.internalPointer()
editor = QComboBox(parent)
# editor.setAutoFillBackground(True)
editor.setFrame(False)
return editor
def setEditorData(self, editor, index):
for i in range(self.mdlLabels.rowCount()):
editor.addItem(self.mdlLabels.item(i, 0).icon(),
self.mdlLabels.item(i, 0).text())
val = index.internalPointer().data(Outline.label)
if not val: val = 0
editor.setCurrentIndex(int(val))
editor.showPopup()
def setModelData(self, editor, model, index):
val = editor.currentIndex()
model.setData(index, val)
def paint(self, painter, option, index):
if not index.isValid():
return QStyledItemDelegate.paint(self, painter, option, index)
else:
item = index.internalPointer()
d = item.data(index.column(), Qt.DisplayRole)
if not d:
d = 0
lbl = self.mdlLabels.item(int(d), 0)
opt = QStyleOptionViewItem(option)
self.initStyleOption(opt, self.mdlLabels.indexFromItem(lbl))
qApp.style().drawControl(QStyle.CE_ItemViewItem, opt, painter)
# Drop down indicator
if index.isValid() and index.internalPointer().data(Outline.label) not in ["", None, "0", 0]:
opt = QStyleOptionComboBox()
opt.rect = option.rect
r = qApp.style().subControlRect(QStyle.CC_ComboBox, opt, QStyle.SC_ComboBoxArrow)
option.rect = r
qApp.style().drawPrimitive(QStyle.PE_IndicatorArrowDown, option, painter)
| gpl-3.0 | -4,893,614,048,736,505,000 | 115,159,461,114,260,430 | 35.265027 | 113 | 0.609131 | false |
genome21/dcos-cli | dcos/cmds.py | 5 | 1376 | import collections
from dcos.errors import DCOSException
Command = collections.namedtuple(
'Command',
['hierarchy', 'arg_keys', 'function'])
"""Describe a CLI command.
:param hierarchy: the noun and verbs that need to be set for the command to
execute
:type hierarchy: list of str
:param arg_keys: the arguments that must get passed to the function; the order
of the keys determines the order in which they get passed to
the function
:type arg_keys: list of str
:param function: the function to execute
:type function: func(args) -> int
"""
def execute(cmds, args):
"""Executes one of the commands based on the arguments passed.
:param cmds: commands to try to execute; the order determines the order of
evaluation
:type cmds: list of Command
:param args: command line arguments
:type args: dict
:returns: the process status
:rtype: int
"""
for hierarchy, arg_keys, function in cmds:
# Let's find if the function matches the command
match = True
for positional in hierarchy:
if not args[positional]:
match = False
if match:
params = [args[name] for name in arg_keys]
return function(*params)
raise DCOSException('Could not find a command with the passed arguments')
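# Illustrative sketch (hypothetical command table; `list_nodes` and the args
# dict are made up to mimic docopt-style parsed arguments):
#
#     def list_nodes(json_flag):
#         print(json_flag)
#         return 0
#
#     cmds = [Command(hierarchy=['node', 'list'],
#                     arg_keys=['--json'],
#                     function=list_nodes)]
#     execute(cmds, {'node': True, 'list': True, '--json': False})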
| apache-2.0 | -8,753,510,241,404,899,000 | 279,434,295,130,169,380 | 29.577778 | 78 | 0.648983 | false |
feroda/JAGOM | apps/basic_groups/tests/__init__.py | 3 | 2761 | from django.core.urlresolvers import reverse
from django.test import TestCase
from basic_groups.models import BasicGroup
class BasicGroupsTest(TestCase):
fixtures = ["basic_groups_auth.json"]
urls = "basic_groups.tests.basic_groups_urls"
def test_unauth_create_get(self):
"""
can an unauth'd user get to page?
"""
response = self.client.get(reverse("group_create"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response["location"], "http://testserver/account/login/?next=/groups/create/")
def test_auth_create_get(self):
"""
can an auth'd user get to page?
"""
logged_in = self.client.login(username="tester", password="tester")
self.assertTrue(logged_in)
response = self.client.get(reverse("group_create"))
self.assertEqual(response.status_code, 200)
def test_unauth_create_post(self):
"""
can an unauth'd user post to create a new group?
"""
response = self.client.post(reverse("group_create"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response["location"], "http://testserver/account/login/?next=/groups/create/")
def test_auth_create_post(self):
"""
can an auth'd user post to create a new group?
"""
logged_in = self.client.login(username="tester", password="tester")
self.assertTrue(logged_in)
response = self.client.post(reverse("group_create"), {
"slug": "test",
"name": "Test Group",
"description": "A test group.",
})
self.assertEqual(response.status_code, 302)
self.assertEqual(response["location"], "http://testserver/groups/group/test/")
self.assertEqual(BasicGroup.objects.get(slug="test").creator.username, "tester")
self.assertEqual(BasicGroup.objects.get(slug="test").members.all()[0].username, "tester")
def test_auth_creator_membership(self):
"""
is membership for creator correct?
"""
logged_in = self.client.login(username="tester", password="tester")
self.assertTrue(logged_in)
response = self.client.post(reverse("group_create"), {
"slug": "test",
"name": "Test Group",
"description": "A test group.",
})
response = self.client.get(reverse("group_detail", args=["test"]))
self.assertEqual(BasicGroup.objects.get(slug="test").creator.username, "tester")
self.assertEqual(BasicGroup.objects.get(slug="test").members.all()[0].username, "tester")
self.assertEqual(response.context[0]["is_member"], True)
| agpl-3.0 | 667,709,109,372,680,200 | 5,384,939,450,900,317,000 | 37.887324 | 103 | 0.6092 | false |
ENCODE-DCC/encoded | src/encoded/tests/test_types_analysis.py | 1 | 1533 | import pytest
def test_types_analysis_title(
testapp,
analysis_released,
encode4_award,
ENCODE3_award,
encode_lab,
file_bam_1_1,
file_bam_2_1,
analysis_step_run_chip_encode4,
analysis_step_run_dnase_encode4,
pipeline_dnase_encode4,
pipeline_chip_encode4,
):
testapp.patch_json(analysis_released['@id'], {'files': [file_bam_1_1['@id']]})
res = testapp.get(analysis_released['@id'] + '@@index-data')
assert res.json['object']['title'] == 'Lab custom mm10'
testapp.patch_json(
file_bam_1_1['@id'],
{'step_run': analysis_step_run_chip_encode4['@id']}
)
testapp.patch_json(
pipeline_chip_encode4['@id'], {'lab': encode_lab['@id'], 'award': encode4_award['@id']})
res = testapp.get(analysis_released['@id'] + '@@index-data')
assert res.json['object']['title'] == 'ENCODE4 mm10'
testapp.patch_json(analysis_released['@id'], {'files': [file_bam_1_1['@id'], file_bam_2_1['@id']]})
testapp.patch_json(
file_bam_1_1['@id'],
{'step_run': analysis_step_run_chip_encode4['@id']}
)
testapp.patch_json(
file_bam_2_1['@id'],
{'step_run': analysis_step_run_dnase_encode4['@id']}
)
testapp.patch_json(
pipeline_dnase_encode4['@id'], {'lab': encode_lab['@id'], 'award': ENCODE3_award['@id']}
)
res = testapp.get(analysis_released['@id'] + '@@index-data')
    print(res.json['object'])
assert res.json['object']['title'] == 'Mixed uniform (ENCODE3, ENCODE4) mm10'
| mit | -8,346,908,019,211,327,000 | 4,202,021,768,826,431,000 | 33.840909 | 103 | 0.590998 | false |
wkrzemien/DIRAC | Resources/MessageQueue/test/Test_MQConsumer.py | 5 | 3828 | """Unit tests of MQConsumer interface in the DIRAC.Resources.MessageQueue.MQConsumer
"""
import unittest
from DIRAC import S_OK, S_ERROR
from DIRAC.Resources.MessageQueue.MQConsumer import MQConsumer
from DIRAC.Resources.MessageQueue.MQConnectionManager import MQConnectionManager
from DIRAC.Resources.MessageQueue.MQConnector import MQConnector
class FakeMQConnector( MQConnector ):
def __init__( self, params={} ):
super( FakeMQConnector, self ).__init__()
def disconnect(self):
return S_OK("FakeMQConnection disconnecting")
def get(self, destination = ''):
return "FakeMQConnection getting message"
def subscribe( self, parameters = None):
return S_OK( 'Subscription successful' )
def unsubscribe(self, parameters):
return S_OK( 'Unsubscription successful' )
class TestMQConsumer( unittest.TestCase ):
def setUp( self ):
self.maxDiff = None # To show full difference between structures in case of error
dest = {}
dest.update({'/queue/FakeQueue': ['consumer4', 'consumer2']})
dest4 = {'/queue/test3': ['consumer1', 'consumer2','consumer3','consumer4']}
conn1 = {'MQConnector':FakeMQConnector(), 'destinations':dest}
conn2 = {'MQConnector':FakeMQConnector(), 'destinations':dest4}
storage = {'fake.cern.ch':conn1, 'testdir.blabla.ch':conn2}
self.myManager = MQConnectionManager(connectionStorage = storage)
def tearDown( self ):
pass
class TestMQConsumer_get( TestMQConsumer):
def test_failure( self ):
consumer = MQConsumer(mqManager = self.myManager, mqURI = "fake.cern.ch::Queue::FakeQueue", consumerId = 'consumer1')
result = consumer.get()
self.assertFalse(result['OK'])
self.assertEqual(result['Message'], 'No messages ( 1141 : No messages in queue)')
  def test_success( self ):
consumer = MQConsumer(mqManager = self.myManager, mqURI = "bad.cern.ch::Queue::FakeQueue", consumerId = 'consumer1')
result = consumer.get()
self.assertFalse(result['OK'])
class TestMQConsumer_close( TestMQConsumer):
def test_success( self ):
consumer = MQConsumer(mqManager = self.myManager, mqURI = "fake.cern.ch::Queue::FakeQueue", consumerId ='consumer4')
result = consumer.close()
self.assertTrue(result['OK'])
def test_failure( self ):
consumer = MQConsumer(mqManager = self.myManager, mqURI = "fake.cern.ch::Queue::FakeQueue", consumerId ='consumer4')
result = consumer.close()
self.assertTrue(result['OK'])
result = consumer.close()
self.assertFalse(result['OK'])
self.assertEqual(result['Message'], 'MQ connection failure ( 1142 : Failed to stop the connection!The messenger consumer4 does not exist!)')
def test_failure2( self ):
consumer = MQConsumer(mqManager = self.myManager, mqURI = "fake.cern.ch::Queue::FakeQueue", consumerId ='consumer4')
consumer2 = MQConsumer(mqManager = self.myManager, mqURI = "fake.cern.ch::Queue::FakeQueue", consumerId ='consumer2')
result = consumer.close()
self.assertTrue(result['OK'])
result = consumer.close()
self.assertFalse(result['OK'])
self.assertEqual(result['Message'], 'MQ connection failure ( 1142 : Failed to stop the connection!The messenger consumer4 does not exist!)')
result = consumer2.close()
self.assertTrue(result['OK'])
result = consumer2.close()
self.assertFalse(result['OK'])
self.assertEqual(result['Message'], 'MQ connection failure ( 1142 : Failed to stop the connection!The messenger consumer2 does not exist!)')
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase( TestMQConsumer )
suite.addTest( unittest.defaultTestLoader.loadTestsFromTestCase( TestMQConsumer_get))
suite.addTest( unittest.defaultTestLoader.loadTestsFromTestCase( TestMQConsumer_close))
testResult = unittest.TextTestRunner( verbosity = 2 ).run( suite )
| gpl-3.0 | 1,593,566,916,994,002,000 | 5,373,553,482,698,274,000 | 48.076923 | 144 | 0.719697 | false |
zackslash/scrapy | scrapy/extensions/httpcache.py | 102 | 16232 | from __future__ import print_function
import os
import gzip
from six.moves import cPickle as pickle
from importlib import import_module
from time import time
from weakref import WeakKeyDictionary
from email.utils import mktime_tz, parsedate_tz
from w3lib.http import headers_raw_to_dict, headers_dict_to_raw
from scrapy.http import Headers, Response
from scrapy.responsetypes import responsetypes
from scrapy.utils.request import request_fingerprint
from scrapy.utils.project import data_path
from scrapy.utils.httpobj import urlparse_cached
class DummyPolicy(object):
def __init__(self, settings):
self.ignore_schemes = settings.getlist('HTTPCACHE_IGNORE_SCHEMES')
self.ignore_http_codes = [int(x) for x in settings.getlist('HTTPCACHE_IGNORE_HTTP_CODES')]
def should_cache_request(self, request):
return urlparse_cached(request).scheme not in self.ignore_schemes
def should_cache_response(self, response, request):
return response.status not in self.ignore_http_codes
    def is_cached_response_fresh(self, cachedresponse, request):
return True
def is_cached_response_valid(self, cachedresponse, response, request):
return True
class RFC2616Policy(object):
MAXAGE = 3600 * 24 * 365 # one year
def __init__(self, settings):
self.always_store = settings.getbool('HTTPCACHE_ALWAYS_STORE')
self.ignore_schemes = settings.getlist('HTTPCACHE_IGNORE_SCHEMES')
self.ignore_response_cache_controls = settings.getlist('HTTPCACHE_IGNORE_RESPONSE_CACHE_CONTROLS')
self._cc_parsed = WeakKeyDictionary()
def _parse_cachecontrol(self, r):
if r not in self._cc_parsed:
cch = r.headers.get('Cache-Control', '')
parsed = parse_cachecontrol(cch)
if isinstance(r, Response):
for key in self.ignore_response_cache_controls:
parsed.pop(key, None)
self._cc_parsed[r] = parsed
return self._cc_parsed[r]
def should_cache_request(self, request):
if urlparse_cached(request).scheme in self.ignore_schemes:
return False
cc = self._parse_cachecontrol(request)
# obey user-agent directive "Cache-Control: no-store"
if 'no-store' in cc:
return False
# Any other is eligible for caching
return True
def should_cache_response(self, response, request):
# What is cacheable - http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec14.9.1
# Response cacheability - http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.4
# Status code 206 is not included because cache can not deal with partial contents
cc = self._parse_cachecontrol(response)
# obey directive "Cache-Control: no-store"
if 'no-store' in cc:
return False
# Never cache 304 (Not Modified) responses
elif response.status == 304:
return False
# Cache unconditionally if configured to do so
elif self.always_store:
return True
# Any hint on response expiration is good
elif 'max-age' in cc or 'Expires' in response.headers:
return True
        # Firefox falls back to a one-year expiration for these statuses
        # if none is set
elif response.status in (300, 301, 308):
return True
# Other statuses without expiration requires at least one validator
elif response.status in (200, 203, 401):
return 'Last-Modified' in response.headers or 'ETag' in response.headers
        # Anything else is probably not eligible for caching; it makes no
        # sense to cache responses that contain no expiration info and
        # cannot be revalidated
else:
return False
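    # Hedged illustration of the branches above (throwaway objects; header
    # values are examples only, given an already constructed `policy`):
    #
    #     from scrapy.http import Request, Response
    #     policy.should_cache_response(
    #         Response('http://example.com', status=200,
    #                  headers={'Cache-Control': 'max-age=3600'}),
    #         Request('http://example.com'))   # -> True (max-age present)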
    def is_cached_response_fresh(self, cachedresponse, request):
cc = self._parse_cachecontrol(cachedresponse)
ccreq = self._parse_cachecontrol(request)
if 'no-cache' in cc or 'no-cache' in ccreq:
return False
now = time()
freshnesslifetime = self._compute_freshness_lifetime(cachedresponse, request, now)
currentage = self._compute_current_age(cachedresponse, request, now)
reqmaxage = self._get_max_age(ccreq)
if reqmaxage is not None:
freshnesslifetime = min(freshnesslifetime, reqmaxage)
if currentage < freshnesslifetime:
return True
if 'max-stale' in ccreq and 'must-revalidate' not in cc:
# From RFC2616: "Indicates that the client is willing to
# accept a response that has exceeded its expiration time.
# If max-stale is assigned a value, then the client is
# willing to accept a response that has exceeded its
# expiration time by no more than the specified number of
# seconds. If no value is assigned to max-stale, then the
# client is willing to accept a stale response of any age."
staleage = ccreq['max-stale']
if staleage is None:
return True
try:
if currentage < freshnesslifetime + max(0, int(staleage)):
return True
except ValueError:
pass
# Cached response is stale, try to set validators if any
self._set_conditional_validators(request, cachedresponse)
return False
def is_cached_response_valid(self, cachedresponse, response, request):
# Use the cached response if the new response is a server error,
# as long as the old response didn't specify must-revalidate.
if response.status >= 500:
cc = self._parse_cachecontrol(cachedresponse)
if 'must-revalidate' not in cc:
return True
# Use the cached response if the server says it hasn't changed.
return response.status == 304
def _set_conditional_validators(self, request, cachedresponse):
if 'Last-Modified' in cachedresponse.headers:
request.headers['If-Modified-Since'] = cachedresponse.headers['Last-Modified']
if 'ETag' in cachedresponse.headers:
request.headers['If-None-Match'] = cachedresponse.headers['ETag']
def _get_max_age(self, cc):
try:
return max(0, int(cc['max-age']))
except (KeyError, ValueError):
return None
def _compute_freshness_lifetime(self, response, request, now):
# Reference nsHttpResponseHead::ComputeFreshnessLifetime
# http://dxr.mozilla.org/mozilla-central/source/netwerk/protocol/http/nsHttpResponseHead.cpp#410
cc = self._parse_cachecontrol(response)
maxage = self._get_max_age(cc)
if maxage is not None:
return maxage
# Parse date header or synthesize it if none exists
date = rfc1123_to_epoch(response.headers.get('Date')) or now
# Try HTTP/1.0 Expires header
if 'Expires' in response.headers:
expires = rfc1123_to_epoch(response.headers['Expires'])
# When parsing Expires header fails RFC 2616 section 14.21 says we
# should treat this as an expiration time in the past.
return max(0, expires - date) if expires else 0
# Fallback to heuristic using last-modified header
# This is not in RFC but on Firefox caching implementation
lastmodified = rfc1123_to_epoch(response.headers.get('Last-Modified'))
if lastmodified and lastmodified <= date:
return (date - lastmodified) / 10
# This request can be cached indefinitely
if response.status in (300, 301, 308):
return self.MAXAGE
        # Insufficient information to compute freshness lifetime
return 0
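    # Worked example (hand-computed from the rules above): with 'Date: t'
    # and 'Expires: t + 600' and no max-age directive, the freshness
    # lifetime is 600 seconds; a 'Cache-Control: max-age=60' header would
    # take precedence and yield 60.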
def _compute_current_age(self, response, request, now):
# Reference nsHttpResponseHead::ComputeCurrentAge
# http://dxr.mozilla.org/mozilla-central/source/netwerk/protocol/http/nsHttpResponseHead.cpp#366
currentage = 0
# If Date header is not set we assume it is a fast connection, and
# clock is in sync with the server
date = rfc1123_to_epoch(response.headers.get('Date')) or now
if now > date:
currentage = now - date
if 'Age' in response.headers:
try:
age = int(response.headers['Age'])
currentage = max(currentage, age)
except ValueError:
pass
return currentage
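# Worked example for RFC2616Policy._compute_current_age (hand-computed):
# with a 'Date' header 30 seconds in the past and 'Age: 50', the current
# age is max(now - date, 50) = max(30, 50) = 50 seconds.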
class DbmCacheStorage(object):
def __init__(self, settings):
self.cachedir = data_path(settings['HTTPCACHE_DIR'], createdir=True)
self.expiration_secs = settings.getint('HTTPCACHE_EXPIRATION_SECS')
self.dbmodule = import_module(settings['HTTPCACHE_DBM_MODULE'])
self.db = None
def open_spider(self, spider):
dbpath = os.path.join(self.cachedir, '%s.db' % spider.name)
self.db = self.dbmodule.open(dbpath, 'c')
def close_spider(self, spider):
self.db.close()
def retrieve_response(self, spider, request):
data = self._read_data(spider, request)
if data is None:
return # not cached
url = data['url']
status = data['status']
headers = Headers(data['headers'])
body = data['body']
respcls = responsetypes.from_args(headers=headers, url=url)
response = respcls(url=url, headers=headers, status=status, body=body)
return response
def store_response(self, spider, request, response):
key = self._request_key(request)
data = {
'status': response.status,
'url': response.url,
'headers': dict(response.headers),
'body': response.body,
}
self.db['%s_data' % key] = pickle.dumps(data, protocol=2)
self.db['%s_time' % key] = str(time())
def _read_data(self, spider, request):
key = self._request_key(request)
db = self.db
tkey = '%s_time' % key
if tkey not in db:
return # not found
ts = db[tkey]
if 0 < self.expiration_secs < time() - float(ts):
return # expired
return pickle.loads(db['%s_data' % key])
def _request_key(self, request):
return request_fingerprint(request)
class FilesystemCacheStorage(object):
def __init__(self, settings):
self.cachedir = data_path(settings['HTTPCACHE_DIR'])
self.expiration_secs = settings.getint('HTTPCACHE_EXPIRATION_SECS')
self.use_gzip = settings.getbool('HTTPCACHE_GZIP')
self._open = gzip.open if self.use_gzip else open
def open_spider(self, spider):
pass
def close_spider(self, spider):
pass
def retrieve_response(self, spider, request):
"""Return response if present in cache, or None otherwise."""
metadata = self._read_meta(spider, request)
if metadata is None:
return # not cached
rpath = self._get_request_path(spider, request)
with self._open(os.path.join(rpath, 'response_body'), 'rb') as f:
body = f.read()
with self._open(os.path.join(rpath, 'response_headers'), 'rb') as f:
rawheaders = f.read()
url = metadata.get('response_url')
status = metadata['status']
headers = Headers(headers_raw_to_dict(rawheaders))
respcls = responsetypes.from_args(headers=headers, url=url)
response = respcls(url=url, headers=headers, status=status, body=body)
return response
def store_response(self, spider, request, response):
"""Store the given response in the cache."""
rpath = self._get_request_path(spider, request)
if not os.path.exists(rpath):
os.makedirs(rpath)
metadata = {
'url': request.url,
'method': request.method,
'status': response.status,
'response_url': response.url,
'timestamp': time(),
}
with self._open(os.path.join(rpath, 'meta'), 'wb') as f:
f.write(repr(metadata))
with self._open(os.path.join(rpath, 'pickled_meta'), 'wb') as f:
pickle.dump(metadata, f, protocol=2)
with self._open(os.path.join(rpath, 'response_headers'), 'wb') as f:
f.write(headers_dict_to_raw(response.headers))
with self._open(os.path.join(rpath, 'response_body'), 'wb') as f:
f.write(response.body)
with self._open(os.path.join(rpath, 'request_headers'), 'wb') as f:
f.write(headers_dict_to_raw(request.headers))
with self._open(os.path.join(rpath, 'request_body'), 'wb') as f:
f.write(request.body)
def _get_request_path(self, spider, request):
key = request_fingerprint(request)
return os.path.join(self.cachedir, spider.name, key[0:2], key)
def _read_meta(self, spider, request):
rpath = self._get_request_path(spider, request)
metapath = os.path.join(rpath, 'pickled_meta')
if not os.path.exists(metapath):
return # not found
mtime = os.stat(rpath).st_mtime
if 0 < self.expiration_secs < time() - mtime:
return # expired
with self._open(metapath, 'rb') as f:
return pickle.load(f)
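# A typical settings sketch for enabling this backend with the RFC 2616
# policy (HTTPCACHE_ENABLED/HTTPCACHE_STORAGE/HTTPCACHE_POLICY are the
# standard Scrapy knobs; the values below are illustrative):
#
#     HTTPCACHE_ENABLED = True
#     HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
#     HTTPCACHE_POLICY = 'scrapy.extensions.httpcache.RFC2616Policy'
#     HTTPCACHE_DIR = 'httpcache'
#     HTTPCACHE_EXPIRATION_SECS = 0
#     HTTPCACHE_GZIP = True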
class LeveldbCacheStorage(object):
def __init__(self, settings):
import leveldb
self._leveldb = leveldb
self.cachedir = data_path(settings['HTTPCACHE_DIR'], createdir=True)
self.expiration_secs = settings.getint('HTTPCACHE_EXPIRATION_SECS')
self.db = None
def open_spider(self, spider):
dbpath = os.path.join(self.cachedir, '%s.leveldb' % spider.name)
self.db = self._leveldb.LevelDB(dbpath)
def close_spider(self, spider):
        # Do compaction each time to save space and also recreate files to
# avoid them being removed in storages with timestamp-based autoremoval.
self.db.CompactRange()
del self.db
def retrieve_response(self, spider, request):
data = self._read_data(spider, request)
if data is None:
return # not cached
url = data['url']
status = data['status']
headers = Headers(data['headers'])
body = data['body']
respcls = responsetypes.from_args(headers=headers, url=url)
response = respcls(url=url, headers=headers, status=status, body=body)
return response
def store_response(self, spider, request, response):
key = self._request_key(request)
data = {
'status': response.status,
'url': response.url,
'headers': dict(response.headers),
'body': response.body,
}
batch = self._leveldb.WriteBatch()
batch.Put('%s_data' % key, pickle.dumps(data, protocol=2))
batch.Put('%s_time' % key, str(time()))
self.db.Write(batch)
def _read_data(self, spider, request):
key = self._request_key(request)
try:
ts = self.db.Get('%s_time' % key)
except KeyError:
return # not found or invalid entry
if 0 < self.expiration_secs < time() - float(ts):
return # expired
try:
data = self.db.Get('%s_data' % key)
except KeyError:
return # invalid entry
else:
return pickle.loads(data)
def _request_key(self, request):
return request_fingerprint(request)
def parse_cachecontrol(header):
"""Parse Cache-Control header
http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9
>>> parse_cachecontrol('public, max-age=3600') == {'public': None,
... 'max-age': '3600'}
True
>>> parse_cachecontrol('') == {}
True
"""
directives = {}
for directive in header.split(','):
key, sep, val = directive.strip().partition('=')
if key:
directives[key.lower()] = val if sep else None
return directives
def rfc1123_to_epoch(date_str):
try:
return mktime_tz(parsedate_tz(date_str))
except Exception:
return None
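# Hedged examples (hand-computed for a well-formed RFC 1123 date):
#
#     >>> rfc1123_to_epoch('Sun, 06 Nov 1994 08:49:37 GMT')
#     784111777
#     >>> rfc1123_to_epoch('not a date') is None
#     True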
| bsd-3-clause | -1,200,069,955,141,073,000 | 1,404,738,876,795,603,700 | 37.103286 | 106 | 0.616375 | false |
FedoraScientific/salome-hexablock | src/TEST_PY/test_unit/test_cuve.py | 1 | 2161 | # -*- coding: latin-1 -*-
# Copyright (C) 2009-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Francis KLOSS - 2011-2013 - CEA-Saclay, DEN, DM2S, SFME, LGLS, F-91191 Gif-sur-Yvette, France
# =============================================================================================
import hexablock
doc = hexablock.addDocument("cuve")
# Build the cylindrical grid
# ---------------------------
centre = doc.addVertex(0, 0, 0)
vecteur_px = doc.addVector(1, 0, 0)
vecteur_pz = doc.addVector(0, 0, 1)
grille = doc.makeCylindrical(centre, vecteur_px, vecteur_pz, 1, 360, 1, 3, 8, 1, False)
# Add the center
# --------------
quad_0 = grille.getQuadJK(0, 0, 0)
quad_6 = grille.getQuadJK(0, 6, 0)
quad_7 = grille.getQuadJK(0, 7, 0)
hexa_a = doc.addHexa3Quads(quad_0, quad_6, quad_7)
quad_2 = grille.getQuadJK(0, 2, 0)
quad_3 = grille.getQuadJK(0, 3, 0)
quad_4 = grille.getQuadJK(0, 4, 0)
hexa_b = doc.addHexa3Quads(quad_2, quad_3, quad_4)
quad_1 = grille.getQuadJK(0, 1, 0)
quad_5 = grille.getQuadJK(0, 5, 0)
quad_a = hexa_a.getQuad(1)
quad_b = hexa_b.getQuad(1)
hexa_c = doc.addHexa4Quads(quad_1, quad_a, quad_5, quad_b)
law = doc.addLaw("Uniform", 3)
for j in range(doc.countPropagation()):
propa = doc.getPropagation(j)
propa.setLaw(law)
mesh_hexas = hexablock.mesh (doc)
| lgpl-2.1 | 8,414,562,851,497,395,000 | -8,359,248,535,716,183,000 | 28.704225 | 95 | 0.643221 | false |
ptdtan/Ragout | lib/networkx/algorithms/block.py | 47 | 4055 | # encoding: utf-8
"""
Functions for creating network blockmodels from node partitions.
Created by Drew Conway <drew.conway@nyu.edu>
Copyright (c) 2010. All rights reserved.
"""
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__=['blockmodel']
import networkx as nx
def blockmodel(G,partitions,multigraph=False):
"""Returns a reduced graph constructed using the generalized block modeling
technique.
The blockmodel technique collapses nodes into blocks based on a
given partitioning of the node set. Each partition of nodes
(block) is represented as a single node in the reduced graph.
Edges between nodes in the block graph are added according to the
edges in the original graph. If the parameter multigraph is False
(the default) a single edge is added with a weight equal to the
sum of the edge weights between nodes in the original graph
The default is a weight of 1 if weights are not specified. If the
parameter multigraph is True then multiple edges are added each
with the edge data from the original graph.
Parameters
----------
G : graph
A networkx Graph or DiGraph
partitions : list of lists, or list of sets
The partition of the nodes. Must be non-overlapping.
multigraph : bool, optional
If True return a MultiGraph with the edge data of the original
graph applied to each corresponding edge in the new graph.
If False return a Graph with the sum of the edge weights, or a
count of the edges if the original graph is unweighted.
Returns
-------
blockmodel : a Networkx graph object
Examples
--------
>>> G=nx.path_graph(6)
>>> partition=[[0,1],[2,3],[4,5]]
>>> M=nx.blockmodel(G,partition)
References
----------
.. [1] Patrick Doreian, Vladimir Batagelj, and Anuska Ferligoj
"Generalized Blockmodeling",Cambridge University Press, 2004.
"""
# Create sets of node partitions
part=list(map(set,partitions))
# Check for overlapping node partitions
u=set()
for p1,p2 in zip(part[:-1],part[1:]):
u.update(p1)
#if not u.isdisjoint(p2): # Python 2.6 required
        if len(u.intersection(p2)) > 0:
raise nx.NetworkXException("Overlapping node partitions.")
# Initialize blockmodel graph
if multigraph:
if G.is_directed():
M=nx.MultiDiGraph()
else:
M=nx.MultiGraph()
else:
if G.is_directed():
M=nx.DiGraph()
else:
M=nx.Graph()
# Add nodes and properties to blockmodel
# The blockmodel nodes are node-induced subgraphs of G
# Label them with integers starting at 0
for i,p in zip(range(len(part)),part):
M.add_node(i)
# The node-induced subgraph is stored as the node 'graph' attribute
SG=G.subgraph(p)
M.node[i]['graph']=SG
M.node[i]['nnodes']=SG.number_of_nodes()
M.node[i]['nedges']=SG.number_of_edges()
M.node[i]['density']=nx.density(SG)
# Create mapping between original node labels and new blockmodel node labels
block_mapping={}
for n in M:
nodes_in_block=M.node[n]['graph'].nodes()
block_mapping.update(dict.fromkeys(nodes_in_block,n))
# Add edges to block graph
for u,v,d in G.edges(data=True):
bmu=block_mapping[u]
bmv=block_mapping[v]
if bmu==bmv: # no self loops
continue
if multigraph:
# For multigraphs add an edge for each edge in original graph
M.add_edge(bmu,bmv,attr_dict=d)
else:
# For graphs and digraphs add single weighted edge
weight=d.get('weight',1.0) # default to 1 if no weight specified
if M.has_edge(bmu,bmv):
M[bmu][bmv]['weight']+=weight
else:
M.add_edge(bmu,bmv,weight=weight)
return M
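# Weighted example (sketch; values hand-checked against the loop above:
# a single inter-block edge gets the default weight of 1.0):
#
#     >>> G = nx.path_graph(6)
#     >>> M = nx.blockmodel(G, [[0, 1], [2, 3], [4, 5]])
#     >>> M[0][1]['weight']
#     1.0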
| gpl-3.0 | 590,908,475,268,509,400 | -7,441,348,357,208,335,000 | 34.26087 | 80 | 0.620222 | false |
damdam-s/account-analytic | __unported__/analytic_secondaxis/wizard/__init__.py | 11 | 1436 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Joel Grand-guillaume (Camptocamp)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from . import analytic_activity_chart
| agpl-3.0 | 6,930,283,288,696,278,000 | 8,461,346,732,470,975,000 | 43.875 | 78 | 0.682451 | false |
ColdHeat/binaryninja-python | Util.py | 4 | 10622 | # Copyright (c) 2011-2015 Rusty Wagner
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import struct
from PySide.QtCore import *
from PySide.QtGui import *
from Crypto.Hash import MD2
from Crypto.Hash import MD4
from Crypto.Hash import MD5
from Crypto.Hash import SHA
from Crypto.Hash import SHA256
from Crypto.Hash import HMAC
import Transform
def hex_dump_encode(data):
result = ""
for i in range(0, len(data), 16):
result += "%.8x:" % i
hex = ""
ascii = ""
for j in range(0, 16):
if (i + j) >= len(data):
hex += " "
else:
hex += " %.2x" % ord(data[i + j])
if (data[i + j] < ' ') or (data[i + j] > '~'):
ascii += "."
else:
ascii += data[i + j]
result += hex + " " + ascii + "\n"
return result
def hex_dump_decode(data):
result = ""
lines = data.split("\n")
for line in lines:
# Hex dump lines follow the following format:
# * An address, followed by any number of spaces
# * The hex dump itself, 16 bytes per line
# * Optionally two or more spaces, followed by the ASCII dump
line.strip(" \t")
if line.find(' ') == -1:
continue
hex = line[line.find(' '):].strip(" \t")
if hex.find(" ") != -1:
hex = hex[0:hex.find(" ")]
hex = hex.replace(" ", "")
hex = hex[0:32]
result += hex.decode("hex")
return result
def encode_utf16_string(data, char_escape):
if len(data) % 2:
raise ValueError, "Odd number of bytes"
result = ""
for i in range(0, len(data), 2):
value = struct.unpack("<H", data[i:i+2])[0]
        if (value >= ord(' ')) and (value <= ord('~')):
result += chr(value)
else:
result += char_escape + ("%.4x" % value)
return result
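# Hand-checked example of the escaping behavior (Python 2 semantics, matching
# the rest of this module; note the ord() comparison above so printable ASCII
# passes through unescaped):
#
#     >>> encode_utf16_string(u'A\u2606'.encode('utf-16le'), '\\u')
#     'A\\u2606'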
def encode_url(data):
result = ""
for i in range(0, len(data)):
if data[i] in ['-', '_', '.', '~']:
result += data[i]
elif (data[i] >= '0') and (data[i] <= '9'):
result += data[i]
elif (data[i] >= 'a') and (data[i] <= 'z'):
result += data[i]
elif (data[i] >= 'A') and (data[i] <= 'Z'):
result += data[i]
else:
result += "%%%.2x" % ord(data[i])
return result
def decode_url(data):
result = ""
i = 0
while i < len(data):
if data[i] == '%':
if data[i + 1] == 'u':
result += unichr(int(data[i+2:i+6], 16)).encode("utf8")
i += 6
else:
result += chr(int(data[i+1:i+3], 16))
i += 3
else:
result += data[i]
i += 1
return result
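# Hand-checked examples (both the %xx and the non-standard %uxxxx forms):
#
#     >>> decode_url('a%20b%u0041')
#     'a bA'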
def encode_c_array(data, element_size, element_struct, type_name, postfix):
if len(data) % element_size:
raise ValueError, "Data length is not a multiple of the element size"
fmt = "0x%%.%dx%s" % (element_size * 2, postfix)
result = "{\n"
for i in range(0, len(data), 16):
line = ""
for j in range(0, 16, element_size):
if (i + j) >= len(data):
break
if j > 0:
line += ", "
value = struct.unpack(element_struct, data[i+j:i+j+element_size])[0]
line += fmt % value
if (i + 16) < len(data):
line += ","
result += "\t" + line + "\n"
return type_name + (" data[%d] = \n" % (len(data) / element_size)) + result + "};\n"
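# Hand-checked example (a full line holds 16 bytes of elements; this short
# input fits on one line):
#
#     >>> encode_c_array('\x01\x02', 1, 'B', 'unsigned char', '')
#     'unsigned char data[2] = \n{\n\t0x01, 0x02\n};\n'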
def decode_int_list(data, signed, unsigned):
result = ""
list = data.split(",")
for i in list:
i = i.strip(" \t\r\n")
value = int(i, 0)
if value < 0:
result += struct.pack(signed, value)
else:
result += struct.pack(unsigned, value)
return result
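# Hand-checked example (negative values use the signed format, all others
# the unsigned one):
#
#     >>> decode_int_list('1, -1', '<h', '<H')
#     '\x01\x00\xff\xff'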
class CancelException(Exception):
pass
def request_key(obj):
dlg = Transform.KeyDialog(obj)
if dlg.exec_() == QDialog.Rejected:
raise CancelException
return dlg.key[:]
def populate_copy_as_menu(menu, obj, action_table):
string_menu = menu.addMenu("Escaped string")
action_table[string_menu.addAction("ASCII")] = lambda : obj.copy_as(lambda data : data.encode("string_escape").replace("\"", "\\\""), False)
action_table[string_menu.addAction("UTF-8 URL")] = lambda : obj.copy_as(encode_url, False)
action_table[string_menu.addAction("UTF-8 IDNA")] = lambda : obj.copy_as(lambda data : data.decode("utf8").encode("idna"), False)
action_table[string_menu.addAction("UTF-16 (\\u)")] = lambda : obj.copy_as(lambda data : encode_utf16_string(data, "\\u"), False)
action_table[string_menu.addAction("UTF-16 (%u)")] = lambda : obj.copy_as(lambda data : encode_utf16_string(data, "%u"), False)
action_table[string_menu.addAction("UTF-16 URL")] = lambda : obj.copy_as(lambda data : encode_url(data.decode("utf16").encode("utf8")), False)
action_table[string_menu.addAction("UTF-16 IDNA")] = lambda : obj.copy_as(lambda data : data.decode("utf16").encode("idna"), False)
unicode_menu = menu.addMenu("Unicode")
action_table[unicode_menu.addAction("UTF-16")] = lambda : obj.copy_as(lambda data : data.decode("utf16"), False)
action_table[unicode_menu.addAction("UTF-32")] = lambda : obj.copy_as(lambda data : data.decode("utf32"), False)
menu.addSeparator()
action_table[menu.addAction("Hex dump")] = lambda : obj.copy_as(hex_dump_encode, False)
action_table[menu.addAction("Raw hex")] = lambda : obj.copy_as(lambda data : data.encode("hex"), False)
action_table[menu.addAction("Base64")] = lambda : obj.copy_as(lambda data : data.encode("base64"), False)
action_table[menu.addAction("UUEncode")] = lambda : obj.copy_as(lambda data : data.encode("uu_codec"), False)
compress_menu = menu.addMenu("Compressed")
action_table[compress_menu.addAction("zlib")] = lambda : obj.copy_as(lambda data : data.encode("zlib"), True)
action_table[compress_menu.addAction("bz2")] = lambda : obj.copy_as(lambda data : data.encode("bz2"), True)
menu.addSeparator()
array_menu = menu.addMenu("C array")
action_table[array_menu.addAction("8-bit elements")] = lambda : obj.copy_as(lambda data : encode_c_array(data, 1, "B", "unsigned char", ""), False)
action_table[array_menu.addAction("16-bit elements")] = lambda : obj.copy_as(lambda data : encode_c_array(data, 2, "<H", "unsigned short", ""), False)
action_table[array_menu.addAction("32-bit elements")] = lambda : obj.copy_as(lambda data : encode_c_array(data, 4, "<I", "unsigned int", ""), False)
action_table[array_menu.addAction("64-bit elements")] = lambda : obj.copy_as(lambda data : encode_c_array(data, 8, "<Q", "unsigned long long", "LL"), False)
menu.addSeparator()
hash_menu = menu.addMenu("Hash")
action_table[hash_menu.addAction("MD2")] = lambda : obj.copy_as(lambda data : MD2.new(data).digest(), True)
action_table[hash_menu.addAction("MD4")] = lambda : obj.copy_as(lambda data : MD4.new(data).digest(), True)
action_table[hash_menu.addAction("MD5")] = lambda : obj.copy_as(lambda data : MD5.new(data).digest(), True)
action_table[hash_menu.addAction("SHA-1")] = lambda : obj.copy_as(lambda data : SHA.new(data).digest(), True)
action_table[hash_menu.addAction("SHA-256")] = lambda : obj.copy_as(lambda data : SHA256.new(data).digest(), True)
hmac_menu = hash_menu.addMenu("HMAC")
action_table[hmac_menu.addAction("MD2")] = lambda : obj.copy_as(lambda data : HMAC.new(request_key(obj), data, MD2).digest(), True)
action_table[hmac_menu.addAction("MD4")] = lambda : obj.copy_as(lambda data : HMAC.new(request_key(obj), data, MD4).digest(), True)
action_table[hmac_menu.addAction("MD5")] = lambda : obj.copy_as(lambda data : HMAC.new(request_key(obj), data, MD5).digest(), True)
action_table[hmac_menu.addAction("SHA-1")] = lambda : obj.copy_as(lambda data : HMAC.new(request_key(obj), data, SHA).digest(), True)
action_table[hmac_menu.addAction("SHA-256")] = lambda : obj.copy_as(lambda data : HMAC.new(request_key(obj), data, SHA256).digest(), True)
def populate_paste_from_menu(menu, obj, action_table):
string_menu = menu.addMenu("Escaped string")
action_table[string_menu.addAction("ASCII")] = lambda : obj.paste_from(lambda data : data.decode("string_escape"))
action_table[string_menu.addAction("UTF-8 URL")] = lambda : obj.paste_from(decode_url)
action_table[string_menu.addAction("UTF-8 IDNA")] = lambda : obj.paste_from(lambda data : data.decode("idna").encode("utf8"))
action_table[string_menu.addAction("UTF-16 (\\u)")] = lambda : obj.paste_from(lambda data : data.decode("unicode_escape").encode("utf-16le"))
action_table[string_menu.addAction("UTF-16 (%u)")] = lambda : obj.paste_from(lambda data : decode_url(data).decode("utf8").encode("utf-16le"))
action_table[string_menu.addAction("UTF-16 URL")] = lambda : obj.paste_from(lambda data : decode_url(data).decode("utf8").encode("utf-16le"))
action_table[string_menu.addAction("UTF-16 IDNA")] = lambda : obj.paste_from(lambda data : data.decode("idna").encode("utf-16le"))
unicode_menu = menu.addMenu("Unicode")
action_table[unicode_menu.addAction("UTF-16")] = lambda : obj.paste_from(lambda data : data.decode("utf8").encode("utf-16le"))
action_table[unicode_menu.addAction("UTF-32")] = lambda : obj.paste_from(lambda data : data.decode("utf8").encode("utf-32le"))
menu.addSeparator()
action_table[menu.addAction("Hex dump")] = lambda : obj.paste_from(hex_dump_decode)
action_table[menu.addAction("Raw hex")] = lambda : obj.paste_from(lambda data : data.translate(None, " ,\t\r\n").decode("hex"))
action_table[menu.addAction("Base64")] = lambda : obj.paste_from(lambda data : data.decode("base64"))
action_table[menu.addAction("UUEncode")] = lambda : obj.paste_from(lambda data : data.decode("uu_codec"))
action_table[menu.addAction("Python expression")] = lambda : obj.paste_from(lambda data : eval(data))
compress_menu = menu.addMenu("Compressed")
action_table[compress_menu.addAction("zlib")] = lambda : obj.paste_from(lambda data : data.decode("zlib"))
action_table[compress_menu.addAction("bz2")] = lambda : obj.paste_from(lambda data : data.decode("bz2"))
menu.addSeparator()
list_menu = menu.addMenu("Integer list")
action_table[list_menu.addAction("8-bit elements")] = lambda : obj.paste_from(lambda data : decode_int_list(data, "b", "B"))
action_table[list_menu.addAction("16-bit elements")] = lambda : obj.paste_from(lambda data : decode_int_list(data, "<h", "<H"))
action_table[list_menu.addAction("32-bit elements")] = lambda : obj.paste_from(lambda data : decode_int_list(data, "<i", "<I"))
action_table[list_menu.addAction("64-bit elements")] = lambda : obj.paste_from(lambda data : decode_int_list(data, "<q", "<Q"))
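# Hypothetical wiring sketch: build a context menu for a widget that
# implements copy_as()/paste_from(), then dispatch on the chosen action.
#
#     menu = QMenu()
#     actions = {}
#     populate_copy_as_menu(menu.addMenu("Copy as"), widget, actions)
#     populate_paste_from_menu(menu.addMenu("Paste from"), widget, actions)
#     chosen = menu.exec_(QCursor.pos())
#     if chosen in actions:
#         actions[chosen]()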
| gpl-2.0 | -6,857,994,661,529,102,000 | -6,681,550,364,083,353,000 | 48.175926 | 157 | 0.667106 | false |
jmesteve/openerp | openerp/addons/stock_invoice_directly/wizard/stock_invoice.py | 39 | 2344 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class invoice_directly(osv.osv_memory):
_inherit = 'stock.partial.picking'
def do_partial(self, cr, uid, ids, context=None):
"""Launch Create invoice wizard if invoice state is To be Invoiced,
after processing the partial picking.
"""
if context is None: context = {}
result = super(invoice_directly, self).do_partial(cr, uid, ids, context)
partial = self.browse(cr, uid, ids[0], context)
if partial.picking_id.state != 'done' and partial.picking_id.backorder_id:
# delivery is not finished, opening invoice on backorder
picking = partial.picking_id.backorder_id
else:
picking = partial.picking_id
context.update(active_model='stock.picking',
active_ids=[picking.id])
if picking.invoice_state == '2binvoiced':
return {
'name': 'Create Invoice',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'stock.invoice.onshipping',
'type': 'ir.actions.act_window',
'target': 'new',
'context': context
}
return {'type': 'ir.actions.act_window_close'}
invoice_directly()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -5,787,200,724,211,479,000 | -8,315,685,916,184,485,000 | 41.618182 | 82 | 0.586177 | false |
DPaaS-Raksha/horizon | horizon/exceptions.py | 7 | 12145 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exceptions raised by the Horizon code and the machinery for handling them.
"""
import logging
import os
import sys
from django.contrib.auth import logout
from django.http import HttpRequest
from django.utils import termcolors
from django.utils.translation import ugettext as _
from django.views.debug import SafeExceptionReporterFilter, CLEANSED_SUBSTITUTE
from horizon import messages
from horizon.conf import HORIZON_CONFIG
LOG = logging.getLogger(__name__)
PALETTE = termcolors.PALETTES[termcolors.DEFAULT_PALETTE]
class HorizonReporterFilter(SafeExceptionReporterFilter):
""" Error report filter that's always active, even in DEBUG mode. """
def is_active(self, request):
return True
# TODO(gabriel): This bugfix is cribbed from Django's code. When 1.4.1
# is available we can remove this code.
def get_traceback_frame_variables(self, request, tb_frame):
"""
Replaces the values of variables marked as sensitive with
stars (*********).
"""
# Loop through the frame's callers to see if the sensitive_variables
# decorator was used.
current_frame = tb_frame.f_back
sensitive_variables = None
while current_frame is not None:
if (current_frame.f_code.co_name == 'sensitive_variables_wrapper'
and 'sensitive_variables_wrapper'
in current_frame.f_locals):
# The sensitive_variables decorator was used, so we take note
# of the sensitive variables' names.
wrapper = current_frame.f_locals['sensitive_variables_wrapper']
sensitive_variables = getattr(wrapper,
'sensitive_variables',
None)
break
current_frame = current_frame.f_back
cleansed = []
if self.is_active(request) and sensitive_variables:
if sensitive_variables == '__ALL__':
# Cleanse all variables
for name, value in tb_frame.f_locals.items():
cleansed.append((name, CLEANSED_SUBSTITUTE))
return cleansed
else:
# Cleanse specified variables
for name, value in tb_frame.f_locals.items():
if name in sensitive_variables:
value = CLEANSED_SUBSTITUTE
elif isinstance(value, HttpRequest):
# Cleanse the request's POST parameters.
value = self.get_request_repr(value)
cleansed.append((name, value))
return cleansed
else:
# Potentially cleanse only the request if it's one of the
# frame variables.
for name, value in tb_frame.f_locals.items():
if isinstance(value, HttpRequest):
# Cleanse the request's POST parameters.
value = self.get_request_repr(value)
cleansed.append((name, value))
return cleansed
class HorizonException(Exception):
""" Base exception class for distinguishing our own exception classes. """
pass
class Http302(HorizonException):
"""
Error class which can be raised from within a handler to cause an
early bailout and redirect at the middleware level.
"""
status_code = 302
def __init__(self, location, message=None):
self.location = location
self.message = message
class NotAuthorized(HorizonException):
"""
Raised whenever a user attempts to access a resource which they do not
have permission-based access to (such as when failing the
:func:`~horizon.decorators.require_perms` decorator).
The included :class:`~horizon.middleware.HorizonMiddleware` catches
``NotAuthorized`` and handles it gracefully by displaying an error
message and redirecting the user to a login page.
"""
status_code = 401
class NotAuthenticated(HorizonException):
"""
Raised when a user is trying to make requests and they are not logged in.
The included :class:`~horizon.middleware.HorizonMiddleware` catches
``NotAuthenticated`` and handles it gracefully by displaying an error
message and redirecting the user to a login page.
"""
status_code = 403
class NotFound(HorizonException):
""" Generic error to replace all "Not Found"-type API errors. """
status_code = 404
class RecoverableError(HorizonException):
""" Generic error to replace any "Recoverable"-type API errors. """
status_code = 100 # HTTP status code "Continue"
class ServiceCatalogException(HorizonException):
"""
Raised when a requested service is not available in the ``ServiceCatalog``
returned by Keystone.
"""
def __init__(self, service_name):
message = 'Invalid service catalog service: %s' % service_name
super(ServiceCatalogException, self).__init__(message)
class AlreadyExists(HorizonException):
"""
Exception to be raised when trying to create an API resource which
already exists.
"""
def __init__(self, name, resource_type):
self.attrs = {"name": name, "resource": resource_type}
self.msg = 'A %(resource)s with the name "%(name)s" already exists.'
def __repr__(self):
return self.msg % self.attrs
def __str__(self):
return self.msg % self.attrs
def __unicode__(self):
return _(self.msg) % self.attrs
class WorkflowError(HorizonException):
""" Exception to be raised when something goes wrong in a workflow. """
pass
class WorkflowValidationError(HorizonException):
"""
Exception raised during workflow validation if required data is missing,
or existing data is not valid.
"""
pass
class HandledException(HorizonException):
"""
Used internally to track exceptions that have gone through
:func:`horizon.exceptions.handle` more than once.
"""
def __init__(self, wrapped):
self.wrapped = wrapped
UNAUTHORIZED = tuple(HORIZON_CONFIG['exceptions']['unauthorized'])
NOT_FOUND = tuple(HORIZON_CONFIG['exceptions']['not_found'])
RECOVERABLE = (AlreadyExists,)
RECOVERABLE += tuple(HORIZON_CONFIG['exceptions']['recoverable'])
def error_color(msg):
return termcolors.colorize(msg, **PALETTE['ERROR'])
def check_message(keywords, message):
"""
Checks an exception for given keywords and raises a new ``ActionError``
with the desired message if the keywords are found. This allows selective
control over API error messages.
"""
exc_type, exc_value, exc_traceback = sys.exc_info()
if set(str(exc_value).split(" ")).issuperset(set(keywords)):
exc_value._safe_message = message
raise
def handle(request, message=None, redirect=None, ignore=False,
escalate=False, log_level=None, force_log=None):
""" Centralized error handling for Horizon.
Because Horizon consumes so many different APIs with completely
different ``Exception`` types, it's necessary to have a centralized
place for handling exceptions which may be raised.
Exceptions are roughly divided into 3 types:
#. ``UNAUTHORIZED``: Errors resulting from authentication or authorization
problems. These result in being logged out and sent to the login screen.
#. ``NOT_FOUND``: Errors resulting from objects which could not be
located via the API. These generally result in a user-facing error
message, but are otherwise returned to the normal code flow. Optionally
a redirect value may be passed to the error handler so users are
returned to a different view than the one requested in addition to the
error message.
#. RECOVERABLE: Generic API errors which generate a user-facing message
but drop directly back to the regular code flow.
All other exceptions bubble the stack as normal unless the ``ignore``
argument is passed in as ``True``, in which case only unrecognized
errors are bubbled.
If the exception is not re-raised, an appropriate wrapper exception
class indicating the type of exception that was encountered will be
returned.
"""
exc_type, exc_value, exc_traceback = sys.exc_info()
log_method = getattr(LOG, log_level or "exception")
force_log = force_log or os.environ.get("HORIZON_TEST_RUN", False)
force_silence = getattr(exc_value, "silence_logging", False)
# Because the same exception may travel through this method more than
# once (if it's re-raised) we may want to treat it differently
# the second time (e.g. no user messages/logging).
handled = issubclass(exc_type, HandledException)
wrap = False
# Restore our original exception information, but re-wrap it at the end
if handled:
exc_type, exc_value, exc_traceback = exc_value.wrapped
wrap = True
# We trust messages from our own exceptions
if issubclass(exc_type, HorizonException):
message = exc_value
# Check for an override message
elif getattr(exc_value, "_safe_message", None):
message = exc_value._safe_message
# If the message has a placeholder for the exception, fill it in
elif message and "%(exc)s" in message:
message = message % {"exc": exc_value}
if issubclass(exc_type, UNAUTHORIZED):
if ignore:
return NotAuthorized
if not force_silence and not handled:
log_method(error_color("Unauthorized: %s" % exc_value))
if not handled:
if message:
message = _("Unauthorized: %s") % message
# We get some pretty useless error messages back from
# some clients, so let's define our own fallback.
fallback = _("Unauthorized. Please try logging in again.")
messages.error(request, message or fallback)
# Escalation means logging the user out and raising NotAuthorized
# so the middleware will redirect them appropriately.
if escalate:
logout(request)
raise NotAuthorized
# Otherwise continue and present our "unauthorized" error message.
return NotAuthorized
if issubclass(exc_type, NOT_FOUND):
wrap = True
if not force_silence and not handled and (not ignore or force_log):
log_method(error_color("Not Found: %s" % exc_value))
if not ignore and not handled:
messages.error(request, message or exc_value)
if redirect:
raise Http302(redirect)
if not escalate:
return NotFound # return to normal code flow
if issubclass(exc_type, RECOVERABLE):
wrap = True
if not force_silence and not handled and (not ignore or force_log):
# Default recoverable error to WARN log level
log_method = getattr(LOG, log_level or "warning")
log_method(error_color("Recoverable error: %s" % exc_value))
if not ignore and not handled:
messages.error(request, message or exc_value)
if redirect:
raise Http302(redirect)
if not escalate:
return RecoverableError # return to normal code flow
# If we've gotten here, time to wrap and/or raise our exception.
if wrap:
raise HandledException([exc_type, exc_value, exc_traceback])
raise exc_type, exc_value, exc_traceback
| apache-2.0 | -6,186,757,432,158,029,000 | 7,106,103,710,376,652,000 | 36.834891 | 79 | 0.655578 | false |
gooddata/openstack-nova | nova/db/sqlalchemy/api_migrations/migrate_repo/versions/018_instance_groups.py | 14 | 2633 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""API Database migrations for instance_groups"""
from migrate import UniqueConstraint
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
groups = Table('instance_groups', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('uuid', String(length=36), nullable=False),
Column('name', String(length=255)),
UniqueConstraint('uuid',
name='uniq_instance_groups0uuid'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
groups.create(checkfirst=True)
group_policy = Table('instance_group_policy', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('policy', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
Index('instance_group_policy_policy_idx', 'policy'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_policy.create(checkfirst=True)
group_member = Table('instance_group_member', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_uuid', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
Index('instance_group_member_instance_idx', 'instance_uuid'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_member.create(checkfirst=True)
| apache-2.0 | 7,051,562,533,655,466,000 | -987,946,846,018,279,700 | 36.084507 | 78 | 0.663122 | false |
Groupe24/CodeInSpace | tournoi/37/game_1.py | 1 | 62455 | # -*- coding: utf-8 -*-
import random
import shutil
import time
import socket
def game_preparation(file):
""" Prepare the data structures need to play the game based on a .mv file.
Return
------
    - redfleet: dictionary of dictionaries containing the data of the ships of the red player (dict).
    - bluefleet: dictionary of dictionaries containing the data of the ships of the blue player (dict).
    - asteroids: list of dictionaries containing the data of the asteroids (list).
    - store: dictionary containing the stats of each type of ship (dict).
    - mapsize: list containing the number of rows and columns of the map (list).
Version
-------
- specification: Simon Defrenne (v.2 03/03/18)
- implementation: Simon Defrenne (v.2 03/03/18)
"""
# preparation work
fh = open(file,"r")
prep = fh.readlines()
# generate store
store = {"scout":{"health":3,"attack":1,"range":3,"cost":3},
"warship":{"health":18,"attack":3,"range":5,"cost":9},
"excavator-S":{"health":2,"tonnage":1,"cost":1},
"excavator-M":{"health":3,"tonnage":4,"cost":2},
"excavator-L":{"health":6,"tonnage":8,"cost":4}}
# generate fleets and portals
redfleet = {}
bluefleet = {}
redfleet["portal"] = {"type":"portal","health":100,"hitbox":[int(str.split(prep[3]," ")[0]),int(str.split(prep[3]," ")[1])],"ore":4,"locked":[],"identifiant":"R","score":0}
bluefleet["portal"] = {"type":"portal","health":100,"hitbox":[int(str.split(prep[4]," ")[0]),int(str.split(prep[4]," ")[1])],"ore":4,"locked":[],"identifiant":"B","score":0}
# generate asteroids
asteroids = []
asteroidsprep1 = prep[6:len(prep)]
for asteroid in asteroidsprep1:
asteroidsprep2 = str.split(asteroid," ")
asteroidsprep2[-1] = asteroidsprep2[-1].replace("\n","")
asteroidsprep3 = {"hitbox":[int(asteroidsprep2[0]),int(asteroidsprep2[1])],"ore":int(asteroidsprep2[2]),"harvest":int(asteroidsprep2[3]),"locked":[]}
asteroids.append(asteroidsprep3)
# stock the size of the map
mapsize = [int(str.split(prep[1]," ")[0])-1,int(str.split(prep[1]," ")[1])-1]
# cleaning work
fh.close
return redfleet,bluefleet,asteroids,store,mapsize
def manhattan_distance(case_1,case_2):
""" Calculate the distance between two case on the field.
Parameters
----------
- case_1 : a particular case on the field (list)
- case_2 : a particular case on the field (list)
Return
------
- distance: the distance between the two case (int)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
x_p,y_p = coordinates(case_1)
x_e,y_e = coordinates(case_2)
x_p = int(x_p)
y_p = int(y_p)
x_e = int(x_e)
y_e = int(y_e)
x1,x2 = max(x_p,x_e),min(x_p,x_e)
y1,y2 = max(y_p,y_e),min(y_p,y_e)
distance = (x1 - x2) + (y1 - y2)
return distance
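# Illustrative sketch (hypothetical values): the manhattan distance between
# [2, 3] and [5, 1] is (5 - 2) + (3 - 1) = 5; this metric is used for the
# range checks of the battleships.
#   manhattan_distance([2, 3], [5, 1])  # -> 5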
def hitbox(ship):
"""Calculate the hitbox of a ship based on its type the localisation of its center.
Parameters
----------
- ship: ship whose hitbox is asked for (str).
Returns
-------
- hitbox : list of coordinates that represent the ship (list).
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
ship_type = ship["type"]
x,y = coordinates(ship["hitbox"])
full_hitbox = []
full_hitbox.append([x,y])
if ship_type == "excavator-M" or ship_type == "excavator-L" or ship_type == "scout" or ship_type == "warship" or ship_type == "portal":
full_hitbox.append([x+1,y])
full_hitbox.append([x-1,y])
full_hitbox.append([x,y+1])
full_hitbox.append([x,y-1])
if ship_type == "excavator-L" or ship_type == "warship" or ship_type == "portal":
full_hitbox.append([x+2,y])
full_hitbox.append([x-2,y])
full_hitbox.append([x,y+2])
full_hitbox.append([x,y-2])
if ship_type == "scout" or ship_type == "warship" or ship_type == "portal":
full_hitbox.append([x+1,y+1])
full_hitbox.append([x-1,y-1])
full_hitbox.append([x+1,y-1])
full_hitbox.append([x-1,y+1])
if ship_type == "warship" or ship_type == "portal":
full_hitbox.append([x+1,y+2])
full_hitbox.append([x-1,y-2])
full_hitbox.append([x+1,y-2])
full_hitbox.append([x-1,y+2])
full_hitbox.append([x+2,y+1])
full_hitbox.append([x-2,y-1])
full_hitbox.append([x+2,y-1])
full_hitbox.append([x-2,y+1])
if ship_type == "portal":
full_hitbox.append([x+2,y+2])
full_hitbox.append([x-2,y-2])
full_hitbox.append([x+2,y-2])
full_hitbox.append([x-2,y+2])
return full_hitbox
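# Illustrative sketch (hypothetical ship): a scout centered on [5, 5] covers
# nine cases (its center plus the four orthogonal and the four diagonal
# neighbours), while an excavator-S only covers its own case.
#   hitbox({"type": "scout", "hitbox": [5, 5]})
#   # -> [[5, 5], [6, 5], [4, 5], [5, 6], [5, 4], [6, 6], [4, 4], [6, 4], [4, 6]]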
def coordinates(list_coordinates):
""" Split a list of two numbers into one abscissa and one ordinate
Parameters
----------
- list_coordinates : list of two numbers, one abscissa and one ordinate (list)
Return
------
- x : rank (int)
- y : column (int)
Version
-------
- specification: Simon Defrenne (v.2 04/05/18)
- implementation: Simon Defrenne (v.1 03/03/18)
"""
x = list_coordinates[0]
y = list_coordinates[1]
return x,y
def attack(ship,target,fleet,enemy_fleet,store):
""" The attack of a ship against a target.
Parameters
----------
    - ship : the name of the ship that attacks (str)
    - target : the targeted case (list)
    - fleet : the fleet of the attacking player (dict)
    - enemy_fleet : the fleet of the enemy player (dict)
- store: the data structure used to stock information on ships based on their types (dict)
Return
------
    - fleet : the fleet of the attacking player (dict)
    - enemy_fleet : the fleet of the enemy player (dict)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
if ship in fleet:
if fleet[ship]["type"] == "scout" or fleet[ship]["type"] == "warship":
if manhattan_distance(fleet[ship]["hitbox"],target) <= store[fleet[ship]["type"]]["range"] and fleet[ship]["action"] == False:
target[0]= int(target[0])
target[1]= int(target[1])
for ships in fleet:
if target in hitbox(fleet[ships]):
fleet[ships]["health"] -= store[fleet[ship]["type"]]["attack"]
for ships in enemy_fleet:
if target in hitbox(enemy_fleet[ships]):
enemy_fleet[ships]["health"] -= store[fleet[ship]["type"]]["attack"]
fleet[ship]["action"] = True
return fleet,enemy_fleet
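# Illustrative sketch (hypothetical ships): a warship standing on [5, 6] can
# shoot the case [7, 9], since the manhattan distance is 5 and its range is 5;
# every ship, friend or foe, whose hitbox covers that case then loses 3 health.
#   redfleet, bluefleet = attack("RW1", [7, 9], redfleet, bluefleet, store)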
def buy(ship_name,ship_type,fleet,store):
""" Add a specific ship to the chosen fleet.
Parameters
----------
- ship_name : the chosen name of the ship (str)
- ship_type : the chosen type of the ship (str)
- fleet : the fleet in which a ship is added (dict)
- store: the data structure used to stock information on ships based on their types (dict)
Return
------
- fleet : the fleet in which a ship is added (dict)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
if store[ship_type]["cost"] <= fleet["portal"]["ore"]:
fleet[ship_name] = {}
fleet[ship_name]["type"] = ship_type
fleet[ship_name]["health"] = store[ship_type]["health"]
fleet[ship_name]["hitbox"] = fleet["portal"]["hitbox"]
fleet[ship_name]["action"] = False
if "tonnage" in store[ship_type]:
fleet[ship_name]["tonnage"] = 0
fleet[ship_name]["lock"] = False
fleet["portal"]["ore"] -= store[ship_type]["cost"]
return fleet
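# Illustrative sketch (hypothetical fleet): buying a scout named "RS1" only
# succeeds if the portal holds at least the 3 ore it costs.
#   redfleet = buy("RS1", "scout", redfleet, store)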
def name_ships(ship_type,fleet):
""" Allows the IA to create names for a ship it will buy.
Parameters
----------
- ship_type : the chosen type of ship (str)
- fleet : the fleet in which a ship is added (dict)
Returns
-------
- ship_name: the name of the ship (str)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne, Marien (v.1 18/03/18)
"""
if ship_type == "scout":
ship_name_1 = "S"
elif ship_type == "warship":
ship_name_1 = "W"
elif ship_type == "excavator-S":
ship_name_1 = "E"
elif ship_type == "excavator-M":
ship_name_1 = "M"
elif ship_type == "excavator-L":
ship_name_1 = "L"
ship_name_1 = fleet["portal"]["identifiant"] + ship_name_1
right_name = False
ship_name_2 = 1
while not right_name:
ship_name = ship_name_1 + str(ship_name_2)
if ship_name in fleet:
ship_name_2 += 1
else:
right_name = True
return ship_name
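# Illustrative sketch: in a red fleet that already owns a scout named "RS1",
# the next scout receives the first free number.
#   name_ships("scout", redfleet)  # -> "RS2"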
def move(ship,fleet,target,mapsize):
""" Move a ship into the target destination.
Parameters
----------
    - ship : the name of the ship that moves (str)
- target : the targeted case (list)
- fleet : the fleet of player (dict)
- mapsize : list containing the number of rows and columns of the map (list)
Return
------
- fleet : the fleet of player (dict)
Version
-------
- specification: Simon Defrenne (v.1 23/03/18)
- implementation: Simon Defrenne (v.1 23/03/18)
"""
if ship in fleet:
movement = False
if manhattan_distance(fleet[ship]["hitbox"],target) <= 2:
if (not "tonnage" in fleet[ship]) or ("tonnage" in fleet[ship] and not fleet[ship]["lock"]):
s_type = fleet[ship]["type"]
if s_type == "warship" or s_type == "excavator-L":
if target[0] - 2 > 0:
movement = True
elif target[0] + 2 <= mapsize[0]:
movement = True
elif target[1] - 2 > 0:
movement = True
elif target[1] + 2 <= mapsize[1]:
movement = True
elif s_type == "scout" or s_type == "excavator-M":
if target[0] - 1 > 0:
movement = True
elif target[0] + 1 <= mapsize[0]:
movement = True
elif target[1] - 1 > 0:
movement = True
elif target[1] + 1 <= mapsize[1]:
movement = True
elif s_type == "excavator-S":
if target[0] > 0:
movement = True
elif target[0] <= mapsize[0]:
movement = True
elif target[1] > 0:
movement = True
elif target[1] <= mapsize[1]:
movement = True
if movement:
x_s,y_s = coordinates(fleet[ship]["hitbox"])
x_t,y_t = coordinates(target)
if (x_s == x_t or y_s == y_t) and manhattan_distance(fleet[ship]["hitbox"],target)==1:
fleet[ship]["hitbox"]=target
elif (x_s != x_t and y_s != y_t) and manhattan_distance(fleet[ship]["hitbox"],target)==2:
fleet[ship]["hitbox"]=target
fleet[ship]["action"] = True
return fleet
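# Illustrative sketch (hypothetical ship): a ship standing on [4, 4] may move
# to any of the eight neighbouring cases; the order is ignored when the target
# is further away, out of bounds, or when the ship already acted this turn.
#   redfleet = move("RS1", redfleet, [5, 5], mapsize)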
def locking(ship,order,fleet,asteroids):
""" Lock or unlock a excavator on a asteroid or its portal.
Parameters
----------
    - ship : the name of the ship that locks/unlocks itself (str)
- order : "lock" to lock a ship, "release" to unlock it (str)
- fleet : the fleet of the ship (dict)
- asteroids : list of asteroids (list)
Return
------
- fleet : the fleet of the ship (dict)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
if ship in fleet:
if fleet[ship]["type"] == "excavator-S" or fleet[ship]["type"] == "excavator-M" or fleet[ship]["type"] == "excavator-L":
if order == "release":
fleet[ship]["lock"] = False
if fleet[ship]["hitbox"] == fleet["portal"]["hitbox"]:
fleet["portal"]["locked"].remove(ship)
else:
for asteroid in asteroids:
if fleet[ship]["hitbox"] == asteroid["hitbox"]:
asteroid["locked"].remove(ship)
elif order == "lock":
if fleet[ship]["hitbox"] == fleet["portal"]["hitbox"]:
fleet["portal"]["locked"].append(ship)
fleet[ship]["lock"] = True
else:
for asteroid in asteroids:
if fleet[ship]["hitbox"] == asteroid["hitbox"]:
fleet[ship]["lock"] = True
asteroid["locked"].append(ship)
return fleet,asteroids
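# Illustrative sketch (hypothetical excavator): an excavator standing on an
# asteroid locks itself to harvest, then releases once its tonnage is full.
#   redfleet, asteroids = locking("RM1", "lock", redfleet, asteroids)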
def turn(order_r,order_b, redfleet, bluefleet, store, asteroids,mapsize):
""" Run a turn of the game based on the orders of the players.
Parameters
----------
- order_r : orders of the red player (str)
- order_b : orders of the blue player (str)
- redfleet : the fleet of the red player (dict)
- bluefleet : the fleet of the blue player (dict)
- asteroids : list of asteroids (list)
- store: the data structure used to stock information on ships based on their types (dict)
- mapsize : list containing the number of rows and columns of the map (list)
Return
------
- redfleet : the fleet of the red player (dict)
- bluefleet : the fleet of the blue player (dict)
- asteroids : list of asteroids (list)
Version
-------
- specification: Simon Defrenne (v.1 23/03/18)
- implementation: Simon Defrenne (v.1 23/03/18)
"""
# resolve every orders and the harvest of ore
r_attack_orders, r_move_orders, r_buy_orders, r_lock_orders = orders_prep(order_r)
b_attack_orders, b_move_orders, b_buy_orders, b_lock_orders = orders_prep(order_b)
redfleet = buy_resolution(r_buy_orders, redfleet,store)
bluefleet = buy_resolution(b_buy_orders, bluefleet,store)
redfleet,asteroids = locking_resolution(r_lock_orders, redfleet, asteroids)
bluefleet,asteroids = locking_resolution(b_lock_orders, bluefleet, asteroids)
redfleet = move_resolution(r_move_orders, redfleet,mapsize)
bluefleet = move_resolution(b_move_orders, bluefleet,mapsize)
redfleet,bluefleet = attack_resolution(r_attack_orders, redfleet, bluefleet, store, asteroids)
bluefleet,redfleet = attack_resolution(b_attack_orders, bluefleet, redfleet, store, asteroids)
if "portal" in redfleet and "portal" in bluefleet:
redfleet,bluefleet,asteroids = harvest(redfleet,bluefleet,asteroids,store)
# prepare the next turn
for ship in redfleet:
if redfleet[ship]["type"] != "portal":
redfleet[ship]["action"] = False
for ship in bluefleet:
if bluefleet[ship]["type"] != "portal":
bluefleet[ship]["action"] = False
return redfleet, bluefleet, asteroids
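# Illustrative sketch (hypothetical orders): one turn mixing a purchase, a
# move, a lock and an attack, written with the syntax parsed by orders_prep.
#   order_r = "RS2:scout RM1:@10-10 RM2:lock RS1:*15-12"
#   order_b = "BM1:excavator-M"
#   redfleet, bluefleet, asteroids = turn(order_r, order_b, redfleet,
#                                         bluefleet, store, asteroids, mapsize)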
def orders_prep(list_orders):
""" Split the string of orders into four lists, based on the type of order.
Parameters
----------
    list_orders: the sequence of orders the player has given (str)
    Return
    ------
    attack_orders: the attack orders (list)
    move_orders: the move orders (list)
    buy_orders: the buying orders (list)
    lock_orders: the locking orders (list)
Version
-------
specification: Marien Dessy (v.1 10/04/18)
implementation: Simon Defrenne (v.1 04/04/18)
"""
list_orders = str.split(list_orders," ")
attack_orders = []
move_orders = []
buy_orders = []
lock_orders = []
for order in list_orders:
if "*" in order:
attack_orders.append(order)
elif "@" in order:
move_orders.append(order)
elif "lock" in order or "release" in order :
lock_orders.append(order)
elif "scout" in order or "warship" in order or "excavator-S" in order or "excavator-M" in order or "excavator-L" in order:
buy_orders.append(order)
return attack_orders, move_orders, buy_orders, lock_orders
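# Illustrative sketch (hypothetical orders): each order is recognised by its
# marker, "*" for attacks, "@" for moves, "lock"/"release" for locking and a
# ship type for purchases.
#   orders_prep("RS1:*5-6 RM1:@4-4 RM1:lock RS2:scout")
#   # -> (['RS1:*5-6'], ['RM1:@4-4'], ['RS2:scout'], ['RM1:lock'])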
def buy_resolution(buy_orders,fleet,store):
""" Resolve the buying orders of a player.
Parameters
----------
- buy_orders: the buying orders (list)
- fleet: the fleet of the player who give the order (dict)
    - store: the data structure used to stock information on ships based on their types (dict)
Return
------
- fleet: the fleet of the player who give the order (dict)
Version
-------
- specification: Marien Dessy (v.1 10/04/18)
- implementation: Simon Defrenne (v.1 04/04/18)
"""
for order in buy_orders:
fleet = buy(str.split(order,":")[0],str.split(order,":")[1],fleet,store)
return fleet
def locking_resolution(lock_orders,fleet,asteroids):
""" Resolve the locking orders of a player.
Parameters
----------
- lock_orders: the locking orders (list)
- fleet: the fleet of the player who give the order (dict)
- asteroids: the list of asteroids (list)
Return
------
- fleet: the fleet of the player who give the order (dict)
Version
-------
- specification: Marien Dessy (v.1 10/04/18)
- implementation: Simon Defrenne (v.1 04/04/18)
"""
for order in lock_orders:
fleet,asteroids = locking(str.split(order,":")[0],str.split(order,":")[1],fleet,asteroids)
return fleet,asteroids
def move_resolution(move_orders,fleet,mapsize):
""" Resolve the move orders of a player.
Parameters
----------
    - move_orders: the move orders (list)
- fleet: the fleet of the player who give the order (dict)
- mapsize : list containing the number of rows and columns of the map (list)
Return
------
- fleet: the fleet of the player who give the order (dict)
Version
-------
- specification: Marien Dessy (v.1 10/04/18)
- implementation: Simon Defrenne (v.1 04/04/18)
"""
for order in move_orders:
ship = str.split(order,":@")[0]
coordinates = str.split(order,":@")[1]
coordinates = [int(str.split(coordinates, "-")[0]),int(str.split(coordinates, "-")[1])]
fleet = move(ship,fleet,coordinates,mapsize)
return fleet
def attack_resolution(attack_orders,fleet,enemy_fleet,store,asteroids):
""" Resolve the attack orders of a player.
Parameters
----------
    - attack_orders: the attack orders (list)
- fleet: the fleet of the player who give the order (dict)
- enemy_fleet: the fleet of the enemy of the preceding player (dict)
- asteroids: the list of asteroids (list)
Return
------
- fleet: the fleet of the player who give the order (dict)
- enemy_fleet: the fleet of the enemy of the preceding player (dict)
Version
-------
- specification: Marien Dessy (v.1 10/04/18)
- implementation: Simon Defrenne (v.1 04/04/18)
"""
for order in attack_orders:
ship = str.split(order,":*")[0]
coordinates_attack = str.split(order,":*")[1]
coordinates_attack_2 = str.split(coordinates_attack, "-")
coordinates = [coordinates_attack_2[0],coordinates_attack_2[1]]
fleet,enemy_fleet = attack(ship,coordinates,fleet,enemy_fleet,store)
# delete the destroyed ships
fleet_dead_ships = []
enemy_fleet_dead_ships = []
for ships in fleet:
if fleet[ships]["health"] <= 0:
fleet_dead_ships.append(ships)
for ships in enemy_fleet:
if enemy_fleet[ships]["health"] <= 0:
enemy_fleet_dead_ships.append(ships)
for ship in fleet_dead_ships:
if "lock" in fleet[ship] and fleet[ship]["lock"]:
if fleet[ship]["hitbox"] == fleet["portal"]["hitbox"] :
if ship in fleet["portal"]["locked"]:
index = fleet["portal"]["locked"].index(ship)
del fleet["portal"]["locked"][index]
else:
for asteroid in asteroids:
if ship in asteroid["locked"]:
index = asteroid["locked"].index(ship)
del asteroid["locked"][index]
del fleet[ship]
for ship in enemy_fleet_dead_ships:
if "lock" in enemy_fleet[ship] and enemy_fleet[ship]["lock"]:
if enemy_fleet[ship]["hitbox"] == enemy_fleet["portal"]["hitbox"] :
if ship in enemy_fleet["portal"]["locked"]:
index = enemy_fleet["portal"]["locked"].index(ship)
del enemy_fleet["portal"]["locked"][index]
else:
for asteroid in asteroids:
if ship in asteroid["locked"]:
index = asteroid["locked"].index(ship)
del asteroid["locked"][index]
del enemy_fleet[ship]
return fleet,enemy_fleet
def harvest(redfleet,bluefleet,asteroids,store):
""" Resolve the harvesting of locked ships.
Parameters
----------
- redfleet: the fleet of the red player (dict)
- bluefleet: the fleet of the blue player (dict)
- asteroids: the list of asteroids (list)
- store: the data structure used to stock information on ships based on their types (dict)
Return
------
    - redfleet: the fleet of the red player (dict)
    - bluefleet: the fleet of the blue player (dict)
    - asteroids: the list of asteroids (list)
Version
-------
- specification: Marien Dessy (v.1 10/04/18)
- implementation: Simon Defrenne (v.1 04/04/18)
"""
for asteroid in asteroids:
if asteroid["locked"] != []:
red_lock = []
blue_lock = []
for ship in asteroid["locked"]:
if ship in redfleet:
red_lock.append(ship)
elif ship in bluefleet:
blue_lock.append(ship)
potential_harvest = len(asteroid["locked"]) * asteroid["harvest"]
if potential_harvest > asteroid["ore"]:
potential_harvest = asteroid["ore"]
ship_harvest = potential_harvest/len(asteroid["locked"])
for ship in red_lock:
tonnage = store[redfleet[ship]["type"]]["tonnage"]
carried_weight = redfleet[ship]["tonnage"]
if tonnage - carried_weight < ship_harvest:
redfleet[ship]["tonnage"] += tonnage - carried_weight
asteroid["ore"] -= tonnage - carried_weight
else:
redfleet[ship]["tonnage"] += ship_harvest
asteroid["ore"] -= ship_harvest
for ship in blue_lock:
tonnage = store[bluefleet[ship]["type"]]["tonnage"]
carried_weight = bluefleet[ship]["tonnage"]
if tonnage - carried_weight < ship_harvest:
bluefleet[ship]["tonnage"] += tonnage - carried_weight
asteroid["ore"] -= tonnage - carried_weight
else:
bluefleet[ship]["tonnage"] += ship_harvest
asteroid["ore"] -= ship_harvest
for ship in redfleet["portal"]["locked"]:
redfleet["portal"]["ore"] += redfleet[ship]["tonnage"]
redfleet["portal"]["score"] += redfleet[ship]["tonnage"]
redfleet[ship]["tonnage"] -= redfleet[ship]["tonnage"]
for ship in bluefleet["portal"]["locked"]:
bluefleet["portal"]["ore"] += bluefleet[ship]["tonnage"]
bluefleet["portal"]["score"] += bluefleet[ship]["tonnage"]
bluefleet[ship]["tonnage"] -= bluefleet[ship]["tonnage"]
return redfleet,bluefleet,asteroids
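# Worked sketch (hypothetical asteroid): with a harvest capacity of 2 and
# three locked excavators, the potential harvest is 3 * 2 = 6 ore; if the
# asteroid only holds 4 ore, each ship receives 4 / 3 ore instead, still
# capped by the free tonnage it has left.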
def IA_buy(IA_fleet,enemy_fleet,store):
""" Make the IA buy a new ship and name it to add this ship in his fleet.
Parameters
----------
    - IA_fleet: the fleet of the IA (dict)
    - enemy_fleet: the fleet of the enemy (dict)
    - store: the database which contains all information about the ship's stats (dict)
    Return
------
- order: the buy order (str)
Version
-------
- specification: Marien Dessy (v.2 27/04/18)
- implementation: Marien Dessy, Simon Defrenne (v.4 04/05/18)
"""
ship_count = {}
ship_count["excavator-S"] = 0
ship_count["excavator-M"] = 0
ship_count["excavator-L"] = 0
ship_count["scout"] = 0
ship_count["warship"] = 0
for ship in IA_fleet:
if ship != "portal":
s_type = IA_fleet[ship]["type"]
ship_count[s_type] +=1
order = ""
buy = True
stock = IA_fleet["portal"]["ore"]
score = IA_fleet["portal"]["score"]
ship_to_buy = {}
while buy:
if ship_count["excavator-M"] < 2:
type_to_buy = "excavator-M"
ship_count["excavator-M"] += 1
stock -= 2
elif ship_count["scout"] < 2 and score >= 8 and stock >= 3:
type_to_buy = "scout"
ship_count["scout"] += 1
stock -= 3
elif score >= 16 and stock >=9:
type_to_buy = "warship"
stock -= 9
elif ship_count["excavator-S"] < 1 and score >= 8 and stock>=1:
type_to_buy = "excavator-S"
ship_count["excavator-S"] += 1
stock -= 1
elif ship_count["excavator-S"] < 2 and score >= 24 and stock>=1:
type_to_buy = "excavator-S"
ship_count["excavator-S"] += 1
stock -= 1
else:
buy = False
if buy:
name = name_ships(type_to_buy,IA_fleet)
while name in ship_to_buy:
name = name[0:2] + str(int(name[2:])+1)
ship_to_buy[name] = type_to_buy
for ship in ship_to_buy:
order += ship + ":" + ship_to_buy[ship] + " "
return order
def calculate_trajectory(test,objectives,choice=True):
""" Calculate the closest or furthest cases in a list from another defined case.
Parameters
----------
- test : the case that is compared (list)
- objective : the list in which we look for the closest case (list)
- choice : True for the closest cases, False for the furthest (bool)
Return
------
- target: one of the closest possibles points (list)
Version
-------
specification: Simon Defrenne (v.1 27/04/18)
implementation: Simon Defrenne (v.1 27/04/18)
"""
target = []
possible_distance = {}
tested_distance = None
for objective in objectives:
tested_distance = calculate_turn_distance(test,objective)
if not tested_distance in possible_distance:
possible_distance[tested_distance] = []
possible_distance[tested_distance].append(objective)
possible_distance_2 = None
for distance in possible_distance:
if choice:
if possible_distance_2 == None or possible_distance_2 > distance:
possible_distance_2 = distance
else:
if possible_distance_2 == None or possible_distance_2 < distance:
possible_distance_2 = distance
target = possible_distance[possible_distance_2]
return target
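# Illustrative sketch (hypothetical cases): among the objectives [0, 0] and
# [4, 5], only the case closest in turns to [3, 3] is kept.
#   calculate_trajectory([3, 3], [[0, 0], [4, 5]])  # -> [[4, 5]]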
def calculate_turn_distance(case_1,case_2):
""" Calculate the number of required turns to go between two case on the field.
Parameters
----------
- case_1 : a particular case on the field (list)
- case_2 : a particular case on the field (list)
Return
------
- distance: the distance between the two case (int)
Version
-------
- specification: Simon Defrenne (v.1 18/03/18)
- implementation: Simon Defrenne (v.1 18/03/18)
"""
x1,y1 = coordinates(case_1)
x2,y2 = coordinates(case_2)
distance = max(max(x1-x2,x2-x1),max(y1-y2,y2-y1))
return distance
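# Illustrative sketch: since ships move diagonally as fast as straight, the
# number of turns between two cases is the Chebyshev distance, e.g. 3 turns
# from [1, 1] to [4, 3] because max(4 - 1, 3 - 1) = 3.
#   calculate_turn_distance([1, 1], [4, 3])  # -> 3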
def overlay_trap(ship, IA_fleet,enemy_fleet,possible_moves,objectives):
""" Make a ship go into an opponent battleship
Parameters
----------
- ship: the ship not ordered yet (dict)
- IA_fleet: the fleet of the IA (dict)
- enemy_fleet : the fleet of the opposing player (dict)
- possibles_moves : doable moves by the ship (list)
- objectives : objectives of the ship (list)
Return
------
- objectives : objectives of the ship (list)
Version
-------
- specification: Simon Defrenne (v.1 06/05/18)
- implementation: Simon Defrenne (v.1 06/05/18)
"""
battleships = []
for e_ship in enemy_fleet:
if enemy_fleet[e_ship]["type"] == "scout" or enemy_fleet[e_ship]["type"] == "warship":
battleships.append(e_ship)
for e_ship in battleships:
overlay = False
if IA_fleet[ship]["hitbox"] in hitbox(enemy_fleet[e_ship]):
if not overlay:
objectives = []
overlay = True
for hitbox_s in hitbox(enemy_fleet[e_ship]):
if hitbox_s in possible_moves:
objectives.append(hitbox_s)
elif not overlay:
objectives.append(enemy_fleet[e_ship]["hitbox"])
if objectives == []:
objectives.append(enemy_fleet["portal"]["hitbox"])
return objectives
def IA_move(ship_dict,IA_fleet,enemy_fleet,asteroids,store,mapsize):
""" Generate move orders for a ship of the IA.
Parameters
----------
    - ship_dict: the ships not ordered yet (dict)
- IA_fleet: the fleet of the IA (dict)
- enemy_fleet : the fleet of the opposing player (dict)
- asteroids : the list of asteroids (list)
- store: the data structure used to stock information on ships based on their types (dict)
- mapsize : list containing the number of rows and columns of the map (list)
Return
------
- order: the move order (str)
Version
-------
specification: Marien Dessy, Simon Defrenne (v.3 04/05/18)
implementation: Marien Dessy, Simon Defrenne (v.4 06/05/18)
"""
order = ""
for ship in ship_dict:
# set up
x_s,y_s = IA_fleet[ship]["hitbox"]
possible_moves = []
possible_moves.append([x_s+1,y_s])
possible_moves.append([x_s+1,y_s+1])
possible_moves.append([x_s,y_s+1])
possible_moves.append([x_s-1,y_s])
possible_moves.append([x_s,y_s-1])
possible_moves.append([x_s-1,y_s-1])
possible_moves.append([x_s+1,y_s-1])
possible_moves.append([x_s-1,y_s+1])
objectives = []
# calculating objectives of each ship
if IA_fleet[ship]["type"] == "excavator-M" or IA_fleet[ship]["type"] == "excavator-L":
if IA_fleet[ship]["tonnage"] == 0:
for asteroid in asteroids:
if asteroid["ore"] > 0:
objectives.append(asteroid["hitbox"])
elif IA_fleet[ship]["tonnage"] == store[IA_fleet[ship]["type"]]["tonnage"]:
objectives.append(IA_fleet["portal"]["hitbox"])
elif IA_fleet[ship]["tonnage"] < store[IA_fleet[ship]["type"]]["tonnage"]:
                # keep the asteroid minimising the trip ship -> asteroid -> portal
                tested_distance = 0
                total_distance = None
for asteroid in asteroids:
if asteroid["ore"] > 0:
distance_a_s = calculate_turn_distance(asteroid["hitbox"],IA_fleet[ship]["hitbox"])
distance_a_p = calculate_turn_distance(asteroid["hitbox"],IA_fleet["portal"]["hitbox"])
tested_distance = distance_a_s + distance_a_p
                        if total_distance == None or tested_distance < total_distance:
total_distance = distance_a_s + distance_a_p
objectives = []
objectives.append(asteroid["hitbox"])
if objectives == []:
objectives = overlay_trap(ship, IA_fleet,enemy_fleet,possible_moves,objectives)
elif IA_fleet[ship]["type"] == "scout":
for e_ship in enemy_fleet:
if enemy_fleet[e_ship]["type"] == "excavator-M" or enemy_fleet[e_ship]["type"] == "excavator-L":
objectives.append(enemy_fleet[e_ship]["hitbox"])
if objectives == []:
                x_p, y_p = enemy_fleet["portal"]["hitbox"]
                # ring of cases at manhattan distance 3 around the enemy
                # portal; list.append takes a single [x, y] case
                objectives.append([x_p + 3, y_p])
                objectives.append([x_p + 2, y_p + 1])
                objectives.append([x_p + 2, y_p - 1])
                objectives.append([x_p + 1, y_p + 2])
                objectives.append([x_p + 1, y_p - 2])
                objectives.append([x_p, y_p + 3])
                objectives.append([x_p, y_p - 3])
                objectives.append([x_p - 1, y_p + 2])
                objectives.append([x_p - 1, y_p - 2])
                objectives.append([x_p - 2, y_p + 1])
                objectives.append([x_p - 2, y_p - 1])
                objectives.append([x_p - 3, y_p])
elif IA_fleet[ship]["type"] == "warship":
objectives.append(enemy_fleet["portal"]["hitbox"])
elif IA_fleet[ship]["type"] == "excavator-S":
objectives = overlay_trap(ship, IA_fleet,enemy_fleet,possible_moves,objectives)
target = calculate_trajectory(IA_fleet[ship]["hitbox"],objectives)
target = random.choice(target)
possible_moves_2 = calculate_trajectory(target,possible_moves)
x_final,y_final = random.choice(possible_moves_2)
# correction of trajectory if needed
if x_final <= 0:
x_final += 2
elif x_final >= mapsize[0]:
x_final -= 2
if y_final <= 0:
y_final += 2
elif y_final >= mapsize[1]:
y_final -= 2
# adding the order the string
order += ship + ":@" + str(x_final) + "-" + str(y_final) + " "
# return the move order
return order
def target(ally_ship,enemy_fleet,store):
""" The Artificial Intelligence choose a target for one of its ships.
Parameters
----------
- ally_ship : the ship that is checked to choose a target (dict)
- enemy_fleet : the fleet of the enemy (dict)
- store: the data structure used to stock information on ships based on their types (dict)
Return
------
- target: the targeted case on which the ship shoots (list)
Version
-------
- specification: Simon Defrenne (v.1 09/03/18)
- implementation: Simon Defrenne, Marien Dessy (v.2 04/05/18)
"""
list_targets = []
target = []
s_range = store[ally_ship["type"]]["range"]
for ship in enemy_fleet:
distance = manhattan_distance(ally_ship["hitbox"],enemy_fleet[ship]["hitbox"])
center_check = distance <= s_range
type_check = enemy_fleet[ship]["type"] != "excavator-S"
if center_check and type_check:
list_targets.append(ship)
if list_targets != []:
health_test = None
for ship in list_targets:
if health_test == None or health_test < enemy_fleet[ship]["health"]:
health_test = enemy_fleet[ship]["health"]
target = enemy_fleet[ship]["hitbox"]
return target
return None
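# Note (behaviour sketch): among the enemy ships whose center is inside the
# firing range, the one with the most health is targeted; excavator-S ships
# are ignored and None is returned when nothing is in range.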
def IA_attack(ship_dict,IA_fleet,enemy_fleet,store):
""" The IA choose randomly a ship to attack.
Parameters
----------
    - ship_dict: the dictionary of ships not ordered yet (dict)
    - IA_fleet: the fleet of the IA (dict)
    - enemy_fleet: the fleet of the enemy (dict)
- store: the data structure used to stock information on ships based on their types (dict)
Return
------
    - order: the attack orders (str)
Version
-------
- specification: Marien Dessy (v.1 11/04/18)
- implementation: Marien Dessy, Simon Defrenne (v.1 11/04/18)
"""
order = ""
battleships = []
for ship in ship_dict:
if ship != "portal" and (not "tonnage" in IA_fleet[ship]):
battleships.append(ship)
for ship in battleships:
attacked_case = target(IA_fleet[ship],enemy_fleet,store)
if attacked_case != None and not(attacked_case in hitbox(IA_fleet[ship])):
order += str(ship + ":*" + str(attacked_case[0]) + "-" + str(attacked_case[1]) + " ")
return order
def IA_locking(ship_dict,IA_fleet,asteroids,store):
""" The IA choose randomly a ship to attack.
Parameters
----------
    - ship_dict: the dictionary of ships not ordered yet (dict)
    - IA_fleet: the fleet of the IA (dict)
    - asteroids: the list of asteroids (list)
- store: the data structure used to stock information on ships based on their types (dict)
Return
------
    - order: the locking orders (str)
Version
-------
- specification: Marien Dessy (v.1 11/04/18)
- implementation: Marien Dessy (v.1 11/04/18)
"""
excavators = {}
for ship in ship_dict:
if "tonnage" in IA_fleet[ship]:
excavators[ship] = IA_fleet[ship]
locked_excavators = {}
unlocked_excavators = {}
for ship in excavators:
if excavators[ship]["lock"]:
locked_excavators[ship]=excavators[ship]
else:
unlocked_excavators[ship]=excavators[ship]
order = ""
for ship in locked_excavators:
s_type = locked_excavators[ship]["type"]
if locked_excavators[ship]["tonnage"] == 0 and locked_excavators[ship]["hitbox"] == IA_fleet["portal"]["hitbox"]:
order += ship + ":release "
else :
for asteroid in asteroids:
if ship in asteroid["locked"]:
if asteroid["ore"] <= 0 or locked_excavators[ship]["tonnage"] == store[s_type]["tonnage"]:
order += ship + ":release "
for ship in unlocked_excavators:
s_type = unlocked_excavators[ship]["type"]
for asteroid in asteroids:
ore_check = asteroid["ore"] > 0
hitbox_check = unlocked_excavators[ship]["hitbox"] == asteroid["hitbox"]
tonnage_check = excavators[ship]["tonnage"] < store[s_type]["tonnage"]
if ore_check and hitbox_check and tonnage_check:
order += ship + ":lock "
if unlocked_excavators[ship]["hitbox"] == IA_fleet["portal"]["hitbox"] and excavators[ship]["tonnage"] > 0:
order += ship + ":lock "
return order
def IA_complete(fleet,enemy_fleet,asteroids,store,mapsize):
""" Generate the orders for the IA.
Parameters
----------
- fleet: the fleet that will be used by the IA (dict)
- enemy_fleet: the fleet of the enemy (dict)
    - asteroids: the list of asteroids (list)
- store: the data structure used to stock information on ships based on their types (dict)
Return
------
- order: the order given by the IA (str)
Version
-------
- specification: Marien Dessy (v.1 12/04/18)
"""
order = ""
ship_dict = {}
for ship in fleet:
if ship != "portal":
ship_dict[ship] = None
order += IA_buy(fleet,enemy_fleet,store)
order += IA_locking(ship_dict,fleet,asteroids,store)
for ship in fleet:
if ship in order:
del ship_dict[ship]
order += IA_attack(ship_dict,fleet,enemy_fleet,store)
for ship in fleet:
if ship in order and ship in ship_dict:
del ship_dict[ship]
order += IA_move(ship_dict,fleet,enemy_fleet,asteroids,store,mapsize)
order = str.strip(order)
return order
# Gui framework
# ==============================================================================
# framework for easy user interface creation.
# Canvas creation and printing.
# ------------------------------------------------------------------------------
# Create and print a canvas in the user console.
def create_canvas(width, height, enable_color = True):
"""
Create a new char canvas.
Parameters
----------
height: height of the game view (int).
width: width of the game view (int).
enable_color: enable color in the game view (bool)
Return
------
canvas: 2D ascii canvas (dic).
"""
# Initialize the canvas.
canvas = {'size': (width, height), 'color': enable_color, 'grid': {}}
# Create canvas's tiles.
for x in range(width):
for y in range(height):
canvas['grid'][(x,y)] = {'color':None, 'back_color':None, 'char':' '}
return canvas
def print_canvas(canvas, x = 0, y = 0):
"""
Print canvas in the terminal.
Parameters
----------
canvas: canvas to print on screen (dic).
(optional) x, y: coodinate in the terminal (int).
"""
canvas_width = canvas['size'][0]
canvas_height = canvas['size'][1]
# Hide and set cursor coordinates.
line = '\033[?25l'
for y in range(canvas_height):
for x in range(canvas_width):
# Get coordinate.
grid_item = canvas['grid'][(x,y)]
# Get coordinate information.
char = grid_item['char']
color = grid_item['color']
back_color = grid_item['back_color']
if (canvas['color']):
line = line + set_color(char, color, back_color)
else:
line = line + char
line += '\n'
# Print, remove the laste \n et reset the print cursor..
print(line[:-1] + '\033[?25h')
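# Illustrative sketch (hypothetical canvas): draw a short label on a small
# canvas and print it in the terminal.
#   c = create_canvas(20, 3)
#   c = put_text(c, 1, 1, "Mining War", "red")
#   print_canvas(c)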
# Canvas drawing.
# ------------------------------------------------------------------------------
# All tools and brush to draw on the canvas.
def put(canvas, x, y, char, color = None, back_color = None):
"""
Put a character in the canvas.
Parameters
----------
canvas: canvas to draw in (dic).
x, y: coordinate of were to put the char (int).
char: char to put (str).
(optiona) color, back_color: color for the char (string).
Return
------
canvas: canvas with the char put on it (dic).
"""
# Check if the coordinate is in the bound of the canvas.
if x < canvas['size'][0] and x >= 0 and\
y < canvas['size'][1] and y >= 0:
# Put the char a the coordinate.
canvas['grid'][(x,y)]['char'] = char
canvas['grid'][(x,y)]['color'] = color
canvas['grid'][(x,y)]['back_color'] = back_color
return canvas
def put_ship(canvas, x, y, char, color = None, back_color = None):
"""
Put function, but for ships.
Parameters
----------
canvas: canvas to draw in (dic).
x, y: coordinate of were to put the char (int).
char: char to put (str).
(optiona) color, back_color: color for the char (string).
Return
------
canvas: canvas with the char put on it (dic).
"""
x -= 1
y -= 1
# Check if the coordinate is in the bound of the canvas.
if x < canvas['size'][0] and x >= 0 and\
y < canvas['size'][1] and y >= 0:
# Put the char a the coordinate.
canvas['grid'][(x,y)]['char'] = char
canvas['grid'][(x,y)]['color'] = color
canvas['grid'][(x,y)]['back_color'] = back_color
return canvas
def put_canvas(canvas, canvas_bis, x, y):
"""
Put a canvas in the canvas.
Parameters
----------
canvas: canvas to draw in (dic).
canvas_bis: canvas to put in the main canvas (dic).
x, y: coordinate of the canvas (int).
Return
------
canvas: the canvas with the other canvas on it (dic).
"""
for cx in range(canvas_bis['size'][0]):
for cy in range(canvas_bis['size'][1]):
char = canvas_bis['grid'][(cx, cy)]
canvas = put(canvas, cx + x, cy + y, char['char'], char['color'], char['back_color'])
return canvas
def put_window(canvas, window_content, title, x, y, style="double", color = None, back_color = None):
"""
Put a window with a windows content in the main canvas.
Parameters
----------
canvas: canvas to draw in (dic).
window_content: content of the window (dic).
title: title of the window (str).
x, y: coordinate of the window (int).
(optional) style: Style of the window (str).
Return
------
canvas: the canvas with the window on it (dic).
"""
c = create_canvas(window_content["size"][0] + 2, window_content["size"][1] + 2, True)
c = put_canvas(c, window_content, 1, 1)
c = put_box(c, 0, 0, window_content["size"][0] + 2, window_content["size"][1] + 2, style)
c = put_text(c, 1, 0, "| %s |" % title, color, back_color)
canvas = put_canvas(canvas, c, x, y)
return canvas
def put_box(canvas, x, y, width, height, mode = 'double', color = None, back_color = None):
"""
Put a box in the canvas.
Parameters
----------
canvas: canvas to draw in (dic).
x, y: coordinate of the rectangle (int).
width, height: size of the rectangle (int).
mode: double ou single line <'double'|'single'> (str).
color, back_color: color for the char (string).
Return
------
canvas: canvas whith the box (dic).
"""
rect_char = ()
if mode == 'double':
rect_char = ('═', '║', '╔', '╚', '╗', '╝')
elif mode == 'single':
rect_char = ('─', '│', '┌', '└', '┐', '┘')
# Put borders.
put_rectangle(canvas, x, y, width, 1, rect_char[0], color, back_color)
put_rectangle(canvas, x, y + height - 1, width, 1, rect_char[0], color, back_color)
put_rectangle(canvas, x, y, 1, height, rect_char[1], color, back_color)
put_rectangle(canvas, x + width - 1, y, 1, height, rect_char[1], color, back_color)
# Put corners.
put(canvas, x, y, rect_char[2], color, back_color)
put(canvas, x, y + height - 1, rect_char[3], color, back_color)
put(canvas, x + width - 1, y, rect_char[4], color, back_color)
put(canvas, x + width - 1, y + height - 1, rect_char[5], color, back_color)
return canvas
def put_rectangle(canvas, x, y, width, height, char, color = None, back_color = None):
"""
Put a filled rectangle in the canvas.
Parameters
----------
canvas: canvas to draw in (dic).
x, y: coordinate of the rectangle (int).
width, height: size of the rectangle (int).
color, back_color: color for the char (string).
Return
------
canvas: canvas whith the rectangle (dic).
"""
for w in range(width):
for h in range(height): canvas = put(canvas, x + w, y + h, char, color, back_color)
return canvas
def put_text(canvas, x, y, text, color = None, back_color = None):
"""
Put a text in the canvas.
Parameters
----------
canvas: canvas to draw in (dic).
x, y: coordinate of the string (int).
direction_x, direction_y: direction to draw the string (int).
Return
------
canvas: game view with the new string (dic).
Notes
-----
direction_x, direction_y: Muste be -1, 0 or 1.
"""
for char in text:
canvas = put(canvas, x, y, char, color, back_color)
x += 1
y += 0
return canvas
def set_color(text, foreground_color, background_color):
"""
Change the color of a text.
Parameters
----------
text: string to color (str).
fore_color: name of the foreground color (str).
back_color: name of the background color (str).
Return
------
colored_text: colored string (str).
Notes
-----
Colors: grey, red, green, yellow, blue, magenta, cyan, white.
ANSI color escape sequences: http://ascii-table.com/ansi-escape-sequences.php
"""
color = { 'grey': 0, 'red': 1, 'green': 2, 'yellow': 3, 'blue': 4, 'magenta': 5, 'cyan': 6, 'white': 7 }
reset = '\033[0m'
format_string = '\033[%dm%s'
if foreground_color is not None: text = format_string % (color[foreground_color] + 30, text)
if background_color is not None: text = format_string % (color[background_color] + 40, text)
text += reset
return text
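# Illustrative sketch: the string is wrapped in ANSI escape sequences, the
# foreground code first, then the background code, then a reset.
#   set_color("alert", "red", "white")
#   # -> '\033[47m\033[31malert\033[0m'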
def slide_animation(canvas_foreground, canvas_background):
"""
"""
out_canvas = create_canvas(canvas_background['size'][0], canvas_background['size'][1])
slide_value = 0
while slide_value <= canvas_background['size'][1]:
put_canvas(out_canvas, canvas_background, 0, 0)
put_canvas(out_canvas, canvas_foreground, 0, 0 - slide_value)
print_canvas(out_canvas)
slide_value += 10
def show_game(red_team, blue_team, asteroids, mapsize, store):
"""
    show the UI for the game
    parameters
    ----------
    red_team: the fleet of the red player (dict)
    blue_team: the fleet of the blue player (dict)
    asteroids: the list of asteroids to display (list)
    mapsize: the size of the map used for the game view (list)
    store: the ship types and stats shown in the shop window (dict)
Version
-------
specification: Alexis Losenko (v.1 6/05/18)
implementation: Alexis Losenko(v.1 01/05/18)
"""
X = 0
Y = 1
tsize = shutil.get_terminal_size((90, 60))
tsize = (tsize[X]-1, tsize[Y]-1)
c = create_canvas(tsize[X] , tsize[Y])
game_window = create_canvas(*mapsize)
shop_window = create_canvas(20, len(store) * 2)
red_window = render_team_window(red_team)
blue_window = render_team_window(blue_team)
for asteroid in asteroids:
put_ship(game_window, *asteroid["hitbox"], "o", "magenta")
    # draw the portals of both teams
for h in hitbox(blue_team["portal"]):
put_ship(game_window, *h, "◘", "blue")
for h in hitbox(red_team["portal"]):
put_ship(game_window, *h, "◘", "red")
    # draw every ship of both fleets
for ship in red_team:
if ship != "portal":
for h in hitbox(red_team[ship]):
put_ship(game_window, *h, "■", "red")
put_ship(game_window, *red_team[ship]["hitbox"], "X", "red")
for ship in blue_team:
if ship != "portal":
for h in hitbox(blue_team[ship]):
put_ship(game_window, *h, "■", "blue")
put_ship(game_window, *blue_team[ship]["hitbox"], "X", "blue")
line = 0
for type in store:
name = type
type = store[type]
put_text(shop_window, 0, line * 2, name, "yellow")
if "excavator" in name:
put_text(shop_window, 0, line * 2+1, " P:%i T:%i" % (type["cost"], type["tonnage"]))
else:
put_text(shop_window, 0, line * 2+1, " P:%i A:%i" % (type["cost"], type["attack"]))
line += 1
origin = (tsize[X] // 2 - game_window["size"][X] // 2, tsize[Y] // 2 - game_window["size"][Y] // 2)
put_window(c, game_window, "Mining War", *origin)
put_window(c, shop_window, "Shop", origin[X] - red_window["size"][X] - 2, origin[Y] + red_window["size"][Y] + 2)
put_window(c, red_window, "Red", origin[X] - red_window["size"][X] - 2, origin[Y], "double", "red")
put_window(c, blue_window, "Blue", origin[X] + game_window["size"][X] + 2, origin[Y], "double", "blue")
print_canvas(c)
def render_team_window(team):
"""
    show the text and details for each ship of a team
    parameters
    ----------
    team: the fleet whose ships are listed in the window (dict)
    return
    ------
    team_window: the canvas containing the team information (dict)
"""
X = 0
Y = 1
team_window = create_canvas(20, 20)
line = 0
for ship in team:
name = ship
if ship != "portal":
ship = team[ship]
position = ship["hitbox"]
if ( "excavator" in ship["type"]):
put_text(team_window, 0, line, "%s %i-%i T:%i H:%i" % (name, position[X] + 1, position[Y] + 1, ship["tonnage"], ship["health"]))
else:
put_text(team_window, 0, line, "%s %i-%i H:%i" % (name, position[X] + 1, position[Y] + 1, ship["health"]))
line += 1
return team_window
def get_IP():
"""Returns the IP of the computer where get_IP is called.
Returns
-------
computer_IP: IP of the computer where get_IP is called (str)
Notes
-----
If you have no internet connection, your IP will be 127.0.0.1.
This IP address refers to the local host, i.e. your computer.
"""
return socket.gethostbyname(socket.gethostname())
def connect_to_player(player_id, remote_IP='127.0.0.1', verbose=False):
"""Initialise communication with remote player.
Parameters
----------
player_id: player id of the remote player, 1 or 2 (int)
remote_IP: IP of the computer where remote player is (str, optional)
verbose: True only if connection progress must be displayed (bool, optional)
Returns
-------
connection: sockets to receive/send orders (tuple)
Notes
-----
Initialisation can take several seconds. The function only
returns after connection has been initialised by both players.
Use the default value of remote_IP if the remote player is running on
the same machine. Otherwise, indicate the IP where the other player
is running with remote_IP. On most systems, the IP of a computer
can be obtained by calling the get_IP function on that computer.
"""
# init verbose display
if verbose:
print('\n-------------------------------------------------------------')
# open socket (as server) to receive orders
socket_in = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket_in.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # deal with a socket in TIME_WAIT state
if remote_IP == '127.0.0.1':
local_IP = '127.0.0.1'
else:
local_IP = get_IP()
local_port = 42000 + (3-player_id)
try:
if verbose:
print('binding on %s:%d to receive orders from player %d...' % (local_IP, local_port, player_id))
socket_in.bind((local_IP, local_port))
    except OSError:
        local_port = 42000 + 100 + (3 - player_id)
        if verbose:
            print('   referee detected, binding instead on %s:%d...' % (local_IP, local_port))
        socket_in.bind((local_IP, local_port))
socket_in.listen(1)
if verbose:
print(' done -> now waiting for a connection on %s:%d\n' % (local_IP, local_port))
# open client socket used to send orders
socket_out = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket_out.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # deal with a socket in TIME_WAIT state
remote_port = 42000 + player_id
connected = False
msg_shown = False
while not connected:
try:
if verbose and not msg_shown:
print('connecting on %s:%d to send orders to player %d...' % (remote_IP, remote_port, player_id))
socket_out.connect((remote_IP, remote_port))
connected = True
if verbose:
print(' done -> now sending orders to player %d on %s:%d' % (player_id, remote_IP, remote_port))
        except OSError:
if verbose and not msg_shown:
print(' connection failed -> will try again every 100 msec...')
time.sleep(.1)
msg_shown = True
if verbose:
print()
# accept connection to the server socket to receive orders from remote player
socket_in, remote_address = socket_in.accept()
if verbose:
print('now listening to orders from player %d' % (player_id))
# end verbose display
if verbose:
print('\nconnection to remote player %d successful\n-------------------------------------------------------------\n' % player_id)
# return sockets for further use
return (socket_in, socket_out)
def disconnect_from_player(connection):
"""End communication with remote player.
Parameters
----------
connection: sockets to receive/send orders (tuple)
"""
# get sockets
socket_in = connection[0]
socket_out = connection[1]
# shutdown sockets
socket_in.shutdown(socket.SHUT_RDWR)
socket_out.shutdown(socket.SHUT_RDWR)
# close sockets
socket_in.close()
socket_out.close()
def notify_remote_orders(connection, orders):
"""Notifies orders of the local player to a remote player.
Parameters
----------
connection: sockets to receive/send orders (tuple)
orders: orders of the local player (str)
Raises
------
IOError: if remote player cannot be reached
"""
# get sockets
socket_in = connection[0]
socket_out = connection[1]
# deal with null orders (empty string)
if orders == '':
orders = 'null'
# send orders
try:
socket_out.sendall(orders.encode())
    except OSError:
raise IOError('remote player cannot be reached')
def get_remote_orders(connection):
"""Returns orders from a remote player.
Parameters
----------
connection: sockets to receive/send orders (tuple)
Returns
----------
player_orders: orders given by remote player (str)
Raises
------
IOError: if remote player cannot be reached
"""
# get sockets
socket_in = connection[0]
socket_out = connection[1]
# receive orders
try:
orders = socket_in.recv(65536).decode()
    except OSError:
raise IOError('remote player cannot be reached')
# deal with null orders
if orders == 'null':
orders = ''
return orders
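# Illustrative round trip (not part of the original module; the order string
# below is hypothetical):
def _connection_demo(player_id):
    """Open a connection, exchange one set of orders, then disconnect."""
    connection = connect_to_player(player_id, verbose=True)
    notify_remote_orders(connection, 'ship1:faster')
    orders = get_remote_orders(connection)
    disconnect_from_player(connection)
    return orders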
def game(path, player_id, remote_IP='127.0.0.1', verbose=False):
""" Run the game.
Parameters
----------
    path: path of the map file (str)
    player_id: id of the local player, 1 or 2 (int)
    remote_IP: IP of the computer where the remote player is (str, optional)
    verbose: True if connection progress must be displayed (bool, optional)
Version
-------
specification: Simon Defrenne (v.1 20/04/18)
implementation: Simon Defrenne, Marien Dessy, Alexis Losenko (v.1 20-04-18)
"""
redfleet,bluefleet,asteroids,store,mapsize = game_preparation(path)
turn_count = 0
game = True
    no_damage_count = 0
connection = connect_to_player(player_id, remote_IP, verbose)
while game:
redfleet_health_data = {}
for ship in redfleet:
redfleet_health_data[ship] = redfleet[ship]["health"]
bluefleet_health_data = {}
for ship in bluefleet:
bluefleet_health_data[ship] = bluefleet[ship]["health"]
if player_id == 2:
player_red = IA_complete(redfleet,bluefleet,asteroids,store,mapsize)
notify_remote_orders(connection,player_red)
player_blue = get_remote_orders(connection)
elif player_id == 1:
player_blue = IA_complete(bluefleet,redfleet,asteroids,store,mapsize)
player_red = get_remote_orders(connection)
notify_remote_orders(connection,player_blue)
redfleet, bluefleet, asteroids = turn(player_red, player_blue, redfleet, bluefleet, store, asteroids,mapsize)
turn_count += 1
show_game(redfleet, bluefleet, asteroids, mapsize, store)
        # check if something has been damaged this turn
        damage_dealt = False
        for ship in redfleet:
            if ship in redfleet_health_data:
                if redfleet_health_data[ship] != redfleet[ship]["health"]:
                    damage_dealt = True
        for ship in bluefleet:
            if ship in bluefleet_health_data:
                if bluefleet_health_data[ship] != bluefleet[ship]["health"]:
                    damage_dealt = True
        if damage_dealt:
            no_damage_count = 0
        else:
            no_damage_count += 1
        # win condition check: a team loses when its portal is destroyed
        if "portal" not in redfleet:
            game = False
            print("Blue player wins.")
            disconnect_from_player(connection)
        elif "portal" not in bluefleet:
            print("Red player wins.")
            game = False
            disconnect_from_player(connection)
elif no_damage_count >= 200:
if redfleet["portal"]["health"] > bluefleet["portal"]["health"]:
print("Red player wins.")
elif redfleet["portal"]["health"] < bluefleet["portal"]["health"]:
print("Blue player wins.")
else:
if redfleet["portal"]["score"] > bluefleet["portal"]["score"]:
print("Red player wins.")
elif redfleet["portal"]["score"] < bluefleet["portal"]["score"]:
print("Blue player wins.")
else:
print("DRAW")
game = False
disconnect_from_player(connection)
time.sleep(0.5)
| mit | -815,545,003,429,439,700 | -1,896,291,917,760,716,000 | 30.716457 | 177 | 0.546866 | false |
allen-fdes/python_demo | cookbook/settings.py | 1 | 2691 | """
Django settings for cookbook project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '547bugh8va($^u5aq+wo12ua0ll_k!rnxj!$)wf*uj+jo$vl+w'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'polls',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'cookbook.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'cookbook.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
| mit | -2,295,290,581,976,753,000 | 7,705,064,073,056,657,000 | 25.126214 | 71 | 0.691564 | false |
bpgc-cte/python2017 | Week 7/django/lib/python3.6/site-packages/pip/_vendor/cachecontrol/serialize.py | 326 | 6536 | import base64
import io
import json
import zlib
from pip._vendor.requests.structures import CaseInsensitiveDict
from .compat import HTTPResponse, pickle, text_type
def _b64_encode_bytes(b):
return base64.b64encode(b).decode("ascii")
def _b64_encode_str(s):
return _b64_encode_bytes(s.encode("utf8"))
def _b64_encode(s):
if isinstance(s, text_type):
return _b64_encode_str(s)
return _b64_encode_bytes(s)
def _b64_decode_bytes(b):
return base64.b64decode(b.encode("ascii"))
def _b64_decode_str(s):
return _b64_decode_bytes(s).decode("utf8")
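# Hypothetical sanity check (not part of the vendored module): encoding then
# decoding a text value returns the original string.
def _b64_roundtrip_demo(value=u"caf\xe9"):
    return _b64_decode_str(_b64_encode_str(value)) == value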
class Serializer(object):
def dumps(self, request, response, body=None):
response_headers = CaseInsensitiveDict(response.headers)
if body is None:
body = response.read(decode_content=False)
# NOTE: 99% sure this is dead code. I'm only leaving it
# here b/c I don't have a test yet to prove
# it. Basically, before using
# `cachecontrol.filewrapper.CallbackFileWrapper`,
# this made an effort to reset the file handle. The
# `CallbackFileWrapper` short circuits this code by
# setting the body as the content is consumed, the
# result being a `body` argument is *always* passed
# into cache_response, and in turn,
# `Serializer.dump`.
response._fp = io.BytesIO(body)
data = {
"response": {
"body": _b64_encode_bytes(body),
"headers": dict(
(_b64_encode(k), _b64_encode(v))
for k, v in response.headers.items()
),
"status": response.status,
"version": response.version,
"reason": _b64_encode_str(response.reason),
"strict": response.strict,
"decode_content": response.decode_content,
},
}
# Construct our vary headers
data["vary"] = {}
if "vary" in response_headers:
varied_headers = response_headers['vary'].split(',')
for header in varied_headers:
header = header.strip()
data["vary"][header] = request.headers.get(header, None)
# Encode our Vary headers to ensure they can be serialized as JSON
data["vary"] = dict(
(_b64_encode(k), _b64_encode(v) if v is not None else v)
for k, v in data["vary"].items()
)
return b",".join([
b"cc=2",
zlib.compress(
json.dumps(
data, separators=(",", ":"), sort_keys=True,
).encode("utf8"),
),
])
def loads(self, request, data):
# Short circuit if we've been given an empty set of data
if not data:
return
# Determine what version of the serializer the data was serialized
# with
try:
ver, data = data.split(b",", 1)
except ValueError:
ver = b"cc=0"
# Make sure that our "ver" is actually a version and isn't a false
# positive from a , being in the data stream.
if ver[:3] != b"cc=":
data = ver + data
ver = b"cc=0"
# Get the version number out of the cc=N
ver = ver.split(b"=", 1)[-1].decode("ascii")
# Dispatch to the actual load method for the given version
try:
return getattr(self, "_loads_v{0}".format(ver))(request, data)
except AttributeError:
# This is a version we don't have a loads function for, so we'll
# just treat it as a miss and return None
return
def prepare_response(self, request, cached):
"""Verify our vary headers match and construct a real urllib3
HTTPResponse object.
"""
# Special case the '*' Vary value as it means we cannot actually
# determine if the cached response is suitable for this request.
if "*" in cached.get("vary", {}):
return
# Ensure that the Vary headers for the cached response match our
# request
for header, value in cached.get("vary", {}).items():
if request.headers.get(header, None) != value:
return
body_raw = cached["response"].pop("body")
headers = CaseInsensitiveDict(data=cached['response']['headers'])
if headers.get('transfer-encoding', '') == 'chunked':
headers.pop('transfer-encoding')
cached['response']['headers'] = headers
try:
body = io.BytesIO(body_raw)
except TypeError:
# This can happen if cachecontrol serialized to v1 format (pickle)
# using Python 2. A Python 2 str(byte string) will be unpickled as
# a Python 3 str (unicode string), which will cause the above to
# fail with:
#
# TypeError: 'str' does not support the buffer interface
body = io.BytesIO(body_raw.encode('utf8'))
return HTTPResponse(
body=body,
preload_content=False,
**cached["response"]
)
def _loads_v0(self, request, data):
# The original legacy cache data. This doesn't contain enough
# information to construct everything we need, so we'll treat this as
# a miss.
return
def _loads_v1(self, request, data):
try:
cached = pickle.loads(data)
except ValueError:
return
return self.prepare_response(request, cached)
def _loads_v2(self, request, data):
try:
cached = json.loads(zlib.decompress(data).decode("utf8"))
except ValueError:
return
# We need to decode the items that we've base64 encoded
cached["response"]["body"] = _b64_decode_bytes(
cached["response"]["body"]
)
cached["response"]["headers"] = dict(
(_b64_decode_str(k), _b64_decode_str(v))
for k, v in cached["response"]["headers"].items()
)
cached["response"]["reason"] = _b64_decode_str(
cached["response"]["reason"],
)
cached["vary"] = dict(
(_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
for k, v in cached["vary"].items()
)
return self.prepare_response(request, cached)
| mit | 8,844,433,486,149,418,000 | 1,248,983,160,552,467,200 | 32.346939 | 78 | 0.549266 | false |
abaditsegay/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/heapq.py | 49 | 15994 | # -*- coding: Latin-1 -*-
"""Heap queue algorithm (a.k.a. priority queue).
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
Usage:
heap = [] # creates an empty heap
heappush(heap, item) # pushes a new item on the heap
item = heappop(heap) # pops the smallest item from the heap
item = heap[0] # smallest item on the heap without popping it
heapify(x) # transforms list into a heap, in-place, in linear time
item = heapreplace(heap, item) # pops and returns smallest item, and adds
# new item; the heap size is unchanged
Our API differs from textbook heap algorithms as follows:
- We use 0-based indexing. This makes the relationship between the
index for a node and the indexes for its children slightly less
obvious, but is more suitable since Python uses 0-based indexing.
- Our heappop() method returns the smallest item, not the largest.
These two make it possible to view the heap as a regular Python list
without surprises: heap[0] is the smallest item, and heap.sort()
maintains the heap invariant!
"""
# Original code by Kevin O'Connor, augmented by Tim Peters and Raymond Hettinger
__about__ = """Heap queues
[explanation by François Pinard]
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
The strange invariant above is meant to be an efficient memory
representation for a tournament. The numbers below are `k', not a[k]:
0
1 2
3 4 5 6
7 8 9 10 11 12 13 14
15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
In the tree above, each cell `k' is topping `2*k+1' and `2*k+2'. In
an usual binary tournament we see in sports, each cell is the winner
over the two cells it tops, and we can trace the winner down the tree
to see all opponents s/he had. However, in many computer applications
of such tournaments, we do not need to trace the history of a winner.
To be more memory efficient, when a winner is promoted, we try to
replace it by something else at a lower level, and the rule becomes
that a cell and the two cells it tops contain three different items,
but the top cell "wins" over the two topped cells.
If this heap invariant is protected at all time, index 0 is clearly
the overall winner. The simplest algorithmic way to remove it and
find the "next" winner is to move some loser (let's say cell 30 in the
diagram above) into the 0 position, and then percolate this new 0 down
the tree, exchanging values, until the invariant is re-established.
This is clearly logarithmic on the total number of items in the tree.
By iterating over all items, you get an O(n ln n) sort.
A nice feature of this sort is that you can efficiently insert new
items while the sort is going on, provided that the inserted items are
not "better" than the last 0'th element you extracted. This is
especially useful in simulation contexts, where the tree holds all
incoming events, and the "win" condition means the smallest scheduled
time. When an event schedule other events for execution, they are
scheduled into the future, so they can easily go into the heap. So, a
heap is a good structure for implementing schedulers (this is what I
used for my MIDI sequencer :-).
Various structures for implementing schedulers have been extensively
studied, and heaps are good for this, as they are reasonably speedy,
the speed is almost constant, and the worst case is not much different
than the average case. However, there are other representations which
are more efficient overall, yet the worst cases might be terrible.
Heaps are also very useful in big disk sorts. You most probably all
know that a big sort implies producing "runs" (which are pre-sorted
sequences, which size is usually related to the amount of CPU memory),
followed by a merging passes for these runs, which merging is often
very cleverly organised[1]. It is very important that the initial
sort produces the longest runs possible. Tournaments are a good way
to that. If, using all the memory available to hold a tournament, you
replace and percolate items that happen to fit the current run, you'll
produce runs which are twice the size of the memory for random input,
and much better for input fuzzily ordered.
Moreover, if you output the 0'th item on disk and get an input which
may not fit in the current tournament (because the value "wins" over
the last output value), it cannot fit in the heap, so the size of the
heap decreases. The freed memory could be cleverly reused immediately
for progressively building a second heap, which grows at exactly the
same rate the first heap is melting. When the first heap completely
vanishes, you switch heaps and start a new run. Clever and quite
effective!
In a word, heaps are useful memory structures to know. I use them in
a few applications, and I think it is good to keep a `heap' module
around. :-)
--------------------
[1] The disk balancing algorithms which are current, nowadays, are
more annoying than clever, and this is a consequence of the seeking
capabilities of the disks. On devices which cannot seek, like big
tape drives, the story was quite different, and one had to be very
clever to ensure (far in advance) that each tape movement will be the
most effective possible (that is, will best participate at
"progressing" the merge). Some tapes were even able to read
backwards, and this was also used to avoid the rewinding time.
Believe me, real good tape sorts were quite spectacular to watch!
From all times, sorting has always been a Great Art! :-)
"""
__all__ = ['heappush', 'heappop', 'heapify', 'heapreplace', 'merge',
'nlargest', 'nsmallest', 'heappushpop']
from itertools import islice, repeat, count, imap, izip, tee
from operator import itemgetter, neg
import bisect
def heappush(heap, item):
"""Push item onto heap, maintaining the heap invariant."""
heap.append(item)
_siftdown(heap, 0, len(heap)-1)
def heappop(heap):
"""Pop the smallest item off the heap, maintaining the heap invariant."""
lastelt = heap.pop() # raises appropriate IndexError if heap is empty
if heap:
returnitem = heap[0]
heap[0] = lastelt
_siftup(heap, 0)
else:
returnitem = lastelt
return returnitem
def heapreplace(heap, item):
"""Pop and return the current smallest value, and add the new item.
This is more efficient than heappop() followed by heappush(), and can be
more appropriate when using a fixed-size heap. Note that the value
returned may be larger than item! That constrains reasonable uses of
this routine unless written as part of a conditional replacement:
if item > heap[0]:
item = heapreplace(heap, item)
"""
returnitem = heap[0] # raises appropriate IndexError if heap is empty
heap[0] = item
_siftup(heap, 0)
return returnitem
def heappushpop(heap, item):
"""Fast version of a heappush followed by a heappop."""
if heap and heap[0] < item:
item, heap[0] = heap[0], item
_siftup(heap, 0)
return item
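# Illustrative sketch (not part of the upstream module): heappushpop keeps
# the heap size constant using a single sift.
def _heappushpop_demo():
    h = [1, 3, 5]
    smallest = heappushpop(h, 4)  # push 4, pop the smallest item
    return smallest, h            # (1, [3, 4, 5])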
def heapify(x):
"""Transform list into a heap, in-place, in O(len(heap)) time."""
n = len(x)
# Transform bottom-up. The largest index there's any point to looking at
# is the largest with a child index in-range, so must have 2*i + 1 < n,
# or i < (n-1)/2. If n is even = 2*j, this is (2*j-1)/2 = j-1/2 so
# j-1 is the largest, which is n//2 - 1. If n is odd = 2*j+1, this is
# (2*j+1-1)/2 = j so j-1 is the largest, and that's again n//2-1.
for i in reversed(xrange(n//2)):
_siftup(x, i)
def nlargest(n, iterable):
"""Find the n largest elements in a dataset.
Equivalent to: sorted(iterable, reverse=True)[:n]
"""
it = iter(iterable)
result = list(islice(it, n))
if not result:
return result
heapify(result)
_heappushpop = heappushpop
for elem in it:
        _heappushpop(result, elem)
result.sort(reverse=True)
return result
def nsmallest(n, iterable):
"""Find the n smallest elements in a dataset.
Equivalent to: sorted(iterable)[:n]
"""
if hasattr(iterable, '__len__') and n * 10 <= len(iterable):
# For smaller values of n, the bisect method is faster than a minheap.
# It is also memory efficient, consuming only n elements of space.
it = iter(iterable)
result = sorted(islice(it, 0, n))
if not result:
return result
insort = bisect.insort
pop = result.pop
los = result[-1] # los --> Largest of the nsmallest
for elem in it:
if los <= elem:
continue
insort(result, elem)
pop()
los = result[-1]
return result
# An alternative approach manifests the whole iterable in memory but
# saves comparisons by heapifying all at once. Also, saves time
# over bisect.insort() which has O(n) data movement time for every
# insertion. Finding the n smallest of an m length iterable requires
# O(m) + O(n log m) comparisons.
h = list(iterable)
heapify(h)
return map(heappop, repeat(h, min(n, len(h))))
# 'heap' is a heap at all indices >= startpos, except possibly for pos. pos
# is the index of a leaf with a possibly out-of-order value. Restore the
# heap invariant.
def _siftdown(heap, startpos, pos):
newitem = heap[pos]
# Follow the path to the root, moving parents down until finding a place
# newitem fits.
while pos > startpos:
parentpos = (pos - 1) >> 1
parent = heap[parentpos]
if newitem < parent:
heap[pos] = parent
pos = parentpos
continue
break
heap[pos] = newitem
# The child indices of heap index pos are already heaps, and we want to make
# a heap at index pos too. We do this by bubbling the smaller child of
# pos up (and so on with that child's children, etc) until hitting a leaf,
# then using _siftdown to move the oddball originally at index pos into place.
#
# We *could* break out of the loop as soon as we find a pos where newitem <=
# both its children, but turns out that's not a good idea, and despite that
# many books write the algorithm that way. During a heap pop, the last array
# element is sifted in, and that tends to be large, so that comparing it
# against values starting from the root usually doesn't pay (= usually doesn't
# get us out of the loop early). See Knuth, Volume 3, where this is
# explained and quantified in an exercise.
#
# Cutting the # of comparisons is important, since these routines have no
# way to extract "the priority" from an array element, so that intelligence
# is likely to be hiding in custom __cmp__ methods, or in array elements
# storing (priority, record) tuples. Comparisons are thus potentially
# expensive.
#
# On random arrays of length 1000, making this change cut the number of
# comparisons made by heapify() a little, and those made by exhaustive
# heappop() a lot, in accord with theory. Here are typical results from 3
# runs (3 just to demonstrate how small the variance is):
#
# Compares needed by heapify Compares needed by 1000 heappops
# -------------------------- --------------------------------
# 1837 cut to 1663 14996 cut to 8680
# 1855 cut to 1659 14966 cut to 8678
# 1847 cut to 1660 15024 cut to 8703
#
# Building the heap by using heappush() 1000 times instead required
# 2198, 2148, and 2219 compares: heapify() is more efficient, when
# you can use it.
#
# The total compares needed by list.sort() on the same lists were 8627,
# 8627, and 8632 (this should be compared to the sum of heapify() and
# heappop() compares): list.sort() is (unsurprisingly!) more efficient
# for sorting.
def _siftup(heap, pos):
endpos = len(heap)
startpos = pos
newitem = heap[pos]
# Bubble up the smaller child until hitting a leaf.
childpos = 2*pos + 1 # leftmost child position
while childpos < endpos:
# Set childpos to index of smaller child.
rightpos = childpos + 1
if rightpos < endpos and not heap[childpos] < heap[rightpos]:
childpos = rightpos
# Move the smaller child up.
heap[pos] = heap[childpos]
pos = childpos
childpos = 2*pos + 1
# The leaf at pos is empty now. Put newitem there, and bubble it up
# to its final resting place (by sifting its parents down).
heap[pos] = newitem
_siftdown(heap, startpos, pos)
# If available, use C implementation
try:
from _heapq import heappush, heappop, heapify, heapreplace, nlargest, nsmallest, heappushpop
except ImportError:
pass
def merge(*iterables):
'''Merge multiple sorted inputs into a single sorted output.
Similar to sorted(itertools.chain(*iterables)) but returns a generator,
does not pull the data into memory all at once, and assumes that each of
the input streams is already sorted (smallest to largest).
>>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
[0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]
'''
_heappop, _heapreplace, _StopIteration = heappop, heapreplace, StopIteration
h = []
h_append = h.append
for itnum, it in enumerate(map(iter, iterables)):
try:
next = it.next
h_append([next(), itnum, next])
except _StopIteration:
pass
heapify(h)
while 1:
try:
while 1:
v, itnum, next = s = h[0] # raises IndexError when h is empty
yield v
s[0] = next() # raises StopIteration when exhausted
_heapreplace(h, s) # restore heap condition
except _StopIteration:
_heappop(h) # remove empty iterator
except IndexError:
return
# Extend the implementations of nsmallest and nlargest to use a key= argument
_nsmallest = nsmallest
def nsmallest(n, iterable, key=None):
"""Find the n smallest elements in a dataset.
Equivalent to: sorted(iterable, key=key)[:n]
"""
if key is None:
it = izip(iterable, count()) # decorate
result = _nsmallest(n, it)
return map(itemgetter(0), result) # undecorate
in1, in2 = tee(iterable)
it = izip(imap(key, in1), count(), in2) # decorate
result = _nsmallest(n, it)
return map(itemgetter(2), result) # undecorate
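# Illustrative sketch (not part of the upstream module): the key= form
# decorates each item before comparing, like list.sort(key=...).
def _nsmallest_demo():
    data = [{'n': 3}, {'n': 1}, {'n': 2}]
    return nsmallest(2, data, key=lambda d: d['n'])  # [{'n': 1}, {'n': 2}]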
_nlargest = nlargest
def nlargest(n, iterable, key=None):
"""Find the n largest elements in a dataset.
Equivalent to: sorted(iterable, key=key, reverse=True)[:n]
"""
if key is None:
it = izip(iterable, imap(neg, count())) # decorate
result = _nlargest(n, it)
return map(itemgetter(0), result) # undecorate
in1, in2 = tee(iterable)
it = izip(imap(key, in1), imap(neg, count()), in2) # decorate
result = _nlargest(n, it)
return map(itemgetter(2), result) # undecorate
if __name__ == "__main__":
# Simple sanity test
heap = []
data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0]
for item in data:
heappush(heap, item)
sort = []
while heap:
sort.append(heappop(heap))
print sort
import doctest
doctest.testmod()
| apache-2.0 | 7,261,208,069,011,147,000 | -7,962,514,971,972,103,000 | 39.697201 | 96 | 0.662686 | false |
Aderemi/Artificial-Intelligence | search_algo.py | 1 | 8943 | from collections import deque
from math import sqrt
import sys
import copy
import bisect
class Problem:
def __init__(self, initial_board, goal_board):
self.initial_board = Board(initial_board)
self.goal_board = Board(goal_board)
def actions(self, board):
#UDLR
possible_moves = [["Down", "Right"],
["Down", "Left", "Right"],
["Left", "Down"],
["Up", "Down", "Right"],
["Up", "Down", "Left", "Right"],
["Up", "Down", "Left"],
["Up", "Right"],
["Up", "Left", "Right"],
["Up", "Left"]]
return possible_moves[board.state.index("0")]
def result(self, board, action):
if action == "Left":
return board.move("Left")
elif action == "Right":
return board.move("Right")
elif action == "Down":
return board.move("Down")
elif action == "Up":
return board.move("Up")
def goalTest(self, board):
return board == self.goal_board
def pathCost(self, cost, board_now, action, next_board):
return cost + 1
class Board:
def __init__(self, state, parent = None, action = None, path_cost = 0):
self.state = copy.copy(state)
self.parent = parent
self.action = action
self.path_cost = path_cost
self.depth = 0
if parent:
self.depth = parent.depth + 1
def __eq__(self, other):
return isinstance(other, Board) and self.state == other.state
def __str__(self):
return "<| Board Items: {} |>".format(self.state)
def __lt__(self, node):
return self.path_cost < node.path_cost
def __hash__(self):
return hash((",").join(self.state))
def swapStateContent(self, empty_pos, new_pos):
new_pos_holder = self.state[new_pos]
self.state[new_pos] = "0"
self.state[empty_pos] = new_pos_holder
return self
def move(self, direction):
empty_pos = self.state.index("0")
up_down_gauge = int(sqrt(len(self.state)))
if direction == "Left":
new_pos = empty_pos - 1
return self.swapStateContent(empty_pos, new_pos)
elif direction == "Right":
new_pos = empty_pos + 1
return self.swapStateContent(empty_pos, new_pos)
elif direction == "Up":
new_pos = empty_pos - up_down_gauge
return self.swapStateContent(empty_pos, new_pos)
elif direction == "Down":
new_pos = empty_pos + up_down_gauge
return self.swapStateContent(empty_pos, new_pos)
def expand(self, problem):
m_list = set()
for action in problem.actions(self):
child = self.childBoard(problem, action)
m_list.add(child)
#print(child.state, action)
return m_list
def childBoard(self, problem, action):
my_copy = Board(self.state, self.parent, self.action, self.path_cost)
next_board = problem.result(my_copy, action)
return Board(next_board.state, self, action, problem.pathCost(self.path_cost, self.state, action, next_board.state))
    def traceBack(self):
        board, parent_n_granies = self, []
        while board.parent:
            parent_n_granies.append(board)
            board = board.parent
        parent_n_granies.reverse()  # order from earliest ancestor to self
        return parent_n_granies
def solution(self, string = False):
solution_actions = [board.action for board in self.traceBack()]
return ",".join(solution_actions) if string else solution_actions
class QueueType:
def __init__(self, items=[], length = None):
self.Queue = deque(items, length)
def __len__(self):
return len(self.Queue)
def __contains__(self, item):
return item in self.Queue
def pop(self):
if len(self.Queue) > 0:
return self.Queue.popleft()
else :
raise Exception('Queue is empty')
    def addItem(self, item):
        if self.Queue.maxlen is None or len(self.Queue) < self.Queue.maxlen:
            self.Queue.append(item)
        else:
            raise Exception('Queue is full')
    def addItems(self, items):
        if self.Queue.maxlen is None or len(items) + len(self.Queue) <= self.Queue.maxlen:
            self.Queue.extend(items)
        else:
            raise Exception('Queue max length will be overflown')
def length(self):
return len(self.Queue)
def contains(self, item):
return item in self.Queue
def StackType():
return []
class PriorityQueueType():
def __init__(self, direction = 'smallest', f = lambda x: x):
self.container = []
self.direction = direction
self.func = f
    def __delitem__(self, key):
        for i, (value, elem) in enumerate(self.container):
            if elem == key:
                self.container.pop(i)
                return
def __len__(self):
return len(self.container)
def __contains__(self, elem):
return any(elem == child[1] for child in self.container)
def __getitem__(self, key):
for _, item in self.container:
if item == key:
return item
def append(self, elem):
bisect.insort_right(self.container, (self.func(elem), elem))
def pop(self):
if self.direction == 'smallest':
return self.container.pop(0)[1]
else:
return self.container.pop()[1]
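# Illustrative sketch (not part of the original script): items come back in
# order of the supplied scoring function f.
def _priority_queue_demo():
    pq = PriorityQueueType('smallest', f=len)
    for word in ("ccc", "a", "bb"):
        pq.append(word)
    return [pq.pop() for _ in range(len(pq))]  # ['a', 'bb', 'ccc']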
class ImplementSearch:
def __init__(self, algo, problem, func = None):
if algo == "BFS":
self.breadthFirstSearch(problem)
elif algo == "DFS":
self.depthFirstSearch(problem)
elif algo == "AST":
self.aStarSearch(problem)
def breadthFirstSearch(self, problem):
ini_board = problem.initial_board
if problem.goalTest(ini_board):
print(ini_board)
return ini_board
frontier = QueueType()
frontier.addItem(ini_board)
explored = []
while frontier:
board = frontier.pop()
if(board.state not in explored):
explored.append(board.state)
print(board.state, board.action, board.path_cost)
print(".................................")
for child in board.expand(problem):
if child.state not in explored and child not in frontier:
if problem.goalTest(child):
print(child)
return child
frontier.addItem(child)
return None
def depthFirstSearch(self, problem):
ini_board = problem.initial_board
frontier = StackType()
frontier.append(ini_board)
explored = []
while frontier:
board = frontier.pop()
if problem.goalTest(board):
return board
if(board.state not in explored):
explored.append(board.state)
print(board.state, board.action, board.path_cost)
print(".................................")
frontier.extend(child for child in board.expand(problem)
if child.state not in explored and
child not in frontier)
return None
def aStarSearch(self, problem):
func = heuristic_h
board = problem.initial_board
if problem.goalTest(board):
return board
frontier = PriorityQueueType("smallest", func)
frontier.append(board)
explored = []
while frontier:
board = frontier.pop()
if problem.goalTest(board):
print(board.solution())
return board
explored.append(board.state)
#print(board.state, board.action, board.path_cost, func(board))
#print(".................................")
for child in board.expand(problem):
if child.state not in explored and child not in frontier:
frontier.append(child)
elif child in frontier:
incumbent = frontier[child]
if func(child) < func(incumbent):
del frontier[incumbent]
frontier.append(child)
return None
def cacheFuncValues(fn, slot = None, maxsize = 32):
"""Memoize fn: make it remember the computed value for any argument list.
If slot is specified, store result in that slot of first argument.
If slot is false, use lru_cache for caching the values."""
if slot:
def memoized_fn(obj, *args):
if hasattr(obj, slot):
return getattr(obj, slot)
else:
val = fn(obj, *args)
setattr(obj, slot, val)
return val
else:
@functools.lru_cache(maxsize=maxsize)
def memoized_fn(*args):
return fn(*args)
return memoized_fn
def heuristic_h(board):
goal = ["0","1","2","3","4","5","6","7","8"]
return sum(abs(int(s) % 3 - int(g) % 3) + abs(int(s) // 3 - int(g) // 3)
for s, g in ((board.state.index(str(i)), goal.index(str(i))) for i in range(1, 9))) + board.path_cost
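# Illustrative check (not in the original script): a board one move from the
# goal has Manhattan distance 1 and zero path cost.
def _heuristic_demo():
    almost_solved = Board(["1", "0", "2", "3", "4", "5", "6", "7", "8"])
    return heuristic_h(almost_solved)  # 1: only tile "1" is one column off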
def writeToFile(line):
f = open('output.txt', 'a')
f.write(line)
f.write("\n")
f.close()
if __name__ == "__main__":
algo = sys.argv[1]
problem_string = sys.argv[2]
print(algo)
f = open('output.txt', 'w')
f.write("---------------------------------------------------------------------------\n")
f.write(" First Men AI Search Algorithm \n")
f.write("---------------------------------------------------------------------------\n")
f.close()
problem = Problem(problem_string.split(","), ["0","1","2","3","4","5","6","7","8"])
ImplementSearch(algo, problem)
| mit | 3,970,816,001,531,675,600 | -644,181,820,516,895,100 | 28.010067 | 118 | 0.58761 | false |
rechner/Taxidi | dblib/postgres.py | 1 | 40884 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
# dblib/postgres.py (spaces, not tabs)
# PostgreSQL database driver for Taxídí.
# Zac Sturgeon <admin@jkltech.net>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
# All database tables and features are documented here:
# http://jkltech.net/taxidi/wiki/Database
#Don't forget to commit the database often, even after a select.
#Had problems with the 'data' table being a reserved keyword, so table
# and column names should always be escaped. Column names are case-
# insensitive otherwise.
debug = True
import os
import sys
import logging
import time
import datetime
import psycopg2
import hashlib
# one directory up
_root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, _root_dir)
import taxidi
#Signaling constants
SUCCESS = 1
OK = 1
FAIL = 2
EMPTY_RESULT = 4
USER_EXISTS = 8
CONSTRAINT_FAILED = 16
UNKNOWN_ERROR = 32
INVALID_PASSWORD = 64
AUTHORIZED = 1
UNAUTHORIZED = 0
NEW = 128
RESET_TABLES = 256
#Database schema version (integer):
database_version = 1
class Database:
"""
PostgreSQL driver for Taxídí database.
"""
def __init__(self, host, dbname, user, password, location='pyTaxidi'):
"""
Opens connection to a PostgreSQL database. Creates tables if they don't
exist, but expects the database was created by the admin.
"""
self.columns = """data.id, name, lastname, dob, data.activity, data.room, grade, phone,
"mobileCarrier", paging, parent1, "parent1Link", parent2,
"parent2Link", "parentEmail", medical, "joinDate", "lastSeen",
"lastModified", count, visitor, "noParentTag", barcode,
picture, authorized, unauthorized, notes"""
self.tables = [ "data", 'barcode', 'authorized', 'unauthorized',
'volunteers', 'categories', 'users', 'activities',
'services', 'rooms', 'carriers', 'statistics' ]
self.tableSQL = []
self.tableSQL.append("""CREATE TABLE data(id SERIAL primary key,
name text, lastname text, dob text, activity integer,
room integer, grade text, phone text,
"mobileCarrier" integer, paging text, parent1 text,
parent2 text, "parent1Link" text, "parent2Link" text,
"parentEmail" text, medical text, "joinDate" DATE,
"lastSeen" DATE, "lastModified" TIMESTAMP, count integer,
visitor bool, expiry text, "noParentTag" bool,
barcode integer, picture text, authorized integer,
unauthorized integer, notes text);""")
self.tableSQL.append("""CREATE TABLE barcode(id SERIAL primary key ,
value text NOT NULL, ref integer REFERENCES "data"(id));""")
self.tableSQL.append("""CREATE TABLE authorized(id SERIAL,
ref integer, name text, lastname text, dob text,
"docNumber" text, photo text, document text, "phoneHome" text,
"phoneMobile" text, "mobileCarrier" integer, notes text);""")
self.tableSQL.append("""CREATE TABLE unauthorized(id SERIAL,
ref integer, name text, lastname text, photo text,
document text, phone text, notes text);""")
self.tableSQL.append("""CREATE TABLE volunteers(id SERIAL,
name text, lastname text, dob text, email text,
username text, "phoneHome" text, "phoneMobile" text,
"mobileCarrier" integer, "backgroundCheck" bool,
"backgroundDocuments" text, profession text, title text,
company text, "jobContact" text, address text, city text,
zip text, state text, country text, nametag bool,
category text, subtitle text, services text, rooms text,
"notifoUser" text, "notifoSecret" text,
availability text, "joinDate" DATE, "lastSeen" DATE,
"lastModified" TIMESTAMP, picture text, notes text);""")
self.tableSQL.append("""CREATE TABLE categories(id SERIAL,
name text, admin integer);""")
self.tableSQL.append("""CREATE TABLE users(id SERIAL,
"user" text UNIQUE NOT NULL, hash text, salt text,
admin bool, "notifoUser" text, "notifoSecret" text,
"scATR" text, "leftHanded" bool, ref int, name text);""")
self.tableSQL.append("""CREATE TABLE activities(id SERIAL,
name text, prefix text, "securityTag" text, "securityMode" text,
"nametagEnable" bool, nametag text,
"parentTagEnable" bool, "parentTag" text,
admin integer, "autoExpire" bool, "notifyExpire" bool,
newsletter bool, "newsletterLink" text,
"registerSMSEnable" bool, "registerSMS" text,
"registerEmailEnable" bool, "registerEmail" text,
"checkinSMSEnable" bool, "checkinSMS" text,
"checkinEmailEnable" bool, "checkinEmail" text,
"parentURI" text, "alertText" text);""")
self.tableSQL.append("""CREATE TABLE services(id SERIAL,
name text, day integer, time TIME, "endTime" TIME);""")
self.tableSQL.append("""CREATE TABLE rooms(id SERIAL,
name text NOT NULL, activity integer NOT NULL,
"volunteerMinimum" integer, "maximumOccupancy" integer, camera text,
"cameraFPS" integer, admin integer, "notifoUser" text, "notifoSecret" text,
email text, mobile text, carrier integer);""")
self.tableSQL.append( """CREATE TABLE carriers(id SERIAL,
name text, region text, address text, subject text,
message text);""")
self.tableSQL.append("""CREATE TABLE statistics(id SERIAL,
person integer, date date,
service text, expires text,
checkin timestamp, checkout timestamp, code text, location text,
volunteer integer, activity text, room text);""")
#Setup logging
self.log = logging.getLogger(__name__)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('[%(asctime)s] %(module)-6s [%(levelname)-8s] %(message)s')
ch.setFormatter(formatter)
self.log.addHandler(ch)
if debug:
self.log.setLevel(logging.DEBUG)
else:
self.log.setLevel(logging.INFO)
#Create connection:
try: #TODO: Add SSL/SSH tunnel
if ':' in host:
host, port = host.split(':')
else:
port = 5432
self.conn = psycopg2.connect(host=host, database=dbname,
user=user, password=password, port=port)
#application_name=location)
self.cursor = self.conn.cursor()
except psycopg2.OperationalError as e:
if e.pgcode == '28P01' or e.pgcode == '28000':
raise DatabaseError(INVALID_PASSWORD)
else:
#Unhandled error. Show it to the user.
raise DatabaseError(FAIL, e)
self.log.info("Created PostgreSQL database instance on host {0}.".format(host))
self.log.debug("Checking for tables and creating them if not present....")
self.status = OK
self.createTables()
def spawnCursor(self):
"""
Returns a new cursor object (for multi-threadding use).
Delete it when done.
"""
return self.conn.cursor()
def createTables(self):
for i in range(len(self.tables)):
#Not user-controled data, so a .format() is okay here.
exists = self.execute(
"SELECT true FROM pg_class WHERE relname = '{0}';".format(self.tables[i]))
if not exists:
#Create it:
self.status = RESET_TABLES
self.log.info("Creating table {0}".format(self.tables[i]))
self.execute(self.tableSQL[i])
self.commit()
self.commit()
def commit(self):
self.log.debug('Committed database')
self.conn.commit()
def close(self):
"""
Close the connection and clean up the objects.
Don't forget to call this when exiting the program.
"""
self.cursor.close()
self.conn.close()
del self.cursor
del self.conn
def execute(self, sql, args=(''), cursor=None):
"""Executes SQL, reporting debug to the log. For internal use."""
if debug:
sql = sql.replace(' ', '').replace('\n', ' ') #make it pretty
if args != (''):
self.log.debug(sql % args)
else:
self.log.debug(sql)
try:
self.cursor.execute(sql, args)
try:
return self.cursor.fetchall()
except psycopg2.ProgrammingError:
return True
except (psycopg2.ProgrammingError, psycopg2.OperationalError) as e:
self.log.error('psycopg2 returned operational error: {0}'
.format(e))
if self.conn:
self.conn.rollback() #drop any changes to preserve db.
raise
def dict_factory(self, row):
d = {}
for idx, col in enumerate(self.cursor.description):
d[col[0]] = row[idx]
return d
def to_dict(self, a):
"""
Converts results from a cursor object to a nested dictionary.
"""
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
# == data functions ==
# Populate all fields for registering a child only. Entries without
# a default are mandatory. Form should check for missing stuff.
def Register(self, name, lastname, phone, parent1, paging='',
mobileCarrier=0, activity=0, room=0, grade='', parent2='',
parent1Link='', parent2Link='', parentEmail='', dob='',
medical='', joinDate='', lastSeen='', lastModified='', count=0,
visitor=False, expiry=None, noParentTag=None, barcode=None,
picture='', authorized=None, unauthorized=None, notes=''):
"""Enter a new child's record into the `"data"` table.
name, lastname, phone, parent1, paging=''
mobileCarrier=0, activity=0, room=0, grade='', parent2='',
parent1Link='', parent2Link='', parentEmail='', dob=''
medical='', joinDate='', lastSeen='', lastModified='', count=0
visitor=False, noParentTag=False, barcode=None, picture='',
authorized=None, unauthorized=None, notes=''
Returns the id of the newly created record.
Be sure to create entry in barcode, unauthorized, or authorized table
before creating a record here.
If registering, be sure to call this before checkin() on the record itself.
Remember to call commit() after creating all these entries.
"""
#set dates:
if joinDate == '': #Generally should always be true (unless
joinDate = str(datetime.date.today()) #importing from script
if lastSeen == '':
lastSeen = str(datetime.date.today()) #ISO8601 (YYYY-MM-DD)
if lastModified == '':
#should be plain ISO 8601 (required for Postgres timestamp type)
#~ lastModified = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
lastModified = datetime.datetime.now()
#~ lastModified = time.ctime() #without timezone.
#escape and execute
self.execute("""INSERT INTO "data"(name, lastname, dob, phone,
paging, parent1, "mobileCarrier", activity, room, grade,
parent2, "parent1Link", "parent2Link", "parentEmail", medical,
"joinDate", "lastSeen", "lastModified", count, visitor, expiry,
"noParentTag", barcode, picture, notes) VALUES
(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,
%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);""",
(name, lastname, dob, phone, paging, parent1, mobileCarrier,
activity, room, grade, parent2, parent1Link, parent2Link,
parentEmail, medical, joinDate, lastSeen, lastModified, count,
visitor, expiry, noParentTag, barcode, picture, notes))
self.commit()
ret = self.execute("""SELECT id FROM "data" WHERE
name = %s AND lastname = %s
AND phone = %s""", (name, lastname, phone))
self.commit()
if len(ret) > 1:
for i in ret[0]:
self.log.warn('Duplicate entry found at {0}.'.format(i))
if ret == []:
raise DatabaseError(EMPTY_RESULT, 'Record not committed.')
else:
return ret[0][0]
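    # Illustrative usage sketch (not part of the original driver; names and
    # values below are hypothetical):
    #   ref = db.Register('Alice', 'Smith', '4805551212', 'Bob Smith')
    #   db.AddBarcode(ref, '0012345')
    #   db.commit()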
def Delete(self, index):
"""Delete a row in the data table by index."""
self.execute("DELETE FROM \"data\" WHERE id = %s;", (index,))
self.commit()
def Update(self, index, name, lastname, phone, parent1, paging='',
mobileCarrier=0, activity=0, room=0, grade='', parent2='',
parent1Link='', parent2Link='', parentEmail='', dob='',
medical='', joinDate=None, lastSeen=None, count=0,
visitor=False, expiry=None, noParentTag=None, barcode=None,
picture='', authorized=None, unauthorized=None, notes=''):
"""Update a record. Pass index as first argument. lastModified automatically set.
index, name, lastname, dob, phone, paging, and parent1 are mandatory.
Defaults are as follows: mobileCarrier=0, activity=0, room=0, grade='',
parent2='', parent1Link='', parent2Link='', parentEmail='', medical='',
joinDate='', lastSeen='', visitor=False,
noParentTag=False, barcode='', picture='', notes=''
"""
try:
self.execute("UPDATE \"data\" SET name=%s, lastname=%s WHERE id=%s;", (name, lastname, index))
self.execute("UPDATE \"data\" SET dob=%s WHERE id=%s;", (dob, index))
self.execute("UPDATE \"data\" SET phone=%s, paging=%s WHERE id=%s;",(phone, paging, index))
self.execute("UPDATE \"data\" SET \"mobileCarrier\"=%s WHERE id=%s;",
(mobileCarrier, index))
self.execute("""UPDATE "data" SET parent1=%s, parent2=%s,
"parent1Link"=%s, "parent2Link"=%s WHERE id=%s""", (parent1,
parent2, parent1Link, parent2Link, index))
self.execute("UPDATE \"data\" SET activity=%s, room=%s, grade=%s WHERE id=%s;",
(activity, room, grade, index))
self.execute("UPDATE \"data\" SET \"parentEmail\"=%s, medical=%s WHERE id=%s;",
(parentEmail, medical, index))
if joinDate != None:
self.execute("UPDATE \"data\" SET \"joinDate\"=%s WHERE id=%s;",
(joinDate, index))
if lastSeen != None:
self.execute("UPDATE \"data\" SET \"lastSeen\"=%s WHERE id=%s;",
(lastSeen, index))
self.execute("UPDATE \"data\" SET \"lastModified\"=%s WHERE id=%s;",
(datetime.datetime.now(), index))
self.execute("""UPDATE "data" SET visitor=%s, expiry=%s, "noParentTag"=%s,
barcode=%s, picture=%s, notes=%s WHERE id=%s;""", (visitor, expiry,
noParentTag, barcode, picture, notes, index))
except psycopg2.Error as e:
self.log.error(e)
self.log.error("Error while updating. Rolling back transaction....")
self.conn.rollback()
raise
self.commit()
# === end data functions ===
# === begin search functions ===
def Search(self, query):
"""
Generic search function.
Searches first through `data`, then passes to SearchVolunteer()
Accepts query as first argument. Searches the following in data table:
- Last four digits of phone number (if len == 4)
- paging(?)
- lastname
- firstname
Then searches through volunteers table.
"""
a = []
if query.isdigit() and (len(query) == 4 or len(query) == 7) \
or query[0] == '+':
#search by phone.
a = self.SearchPhone(query)
if not query.isdigit(): #Search in names.
a = self.SearchName(query)
if len(a) == 0:
#Search partial names:
a = self.SearchName(query+'%')
#check if hex:
try:
hexval = int(query, 16)
isHex = True
except:
isHex = False
        if (len(query) == 3 or (isHex and len(query) == 4)) and len(a) == 0:
a = self.SearchSecure(query)
if len(a) == 0: #Catch barcodes
a = self.SearchBarcode(query)
#TODO: Search volunteers:
return a
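    # Illustrative dispatch (not part of the original driver): '1212' is
    # treated as the tail of a phone number, 'Smi*' as a wild-card name
    # search, and a short code falls through to SearchSecure() and
    # SearchBarcode() when nothing else matched.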
def SearchName(self, query):
"""
Searches only in name, lastname, parent's column.
Returns *. '*' and '%' are treated as wild-card characters, and will
search using the LIKE operator.
"""
if ("%" in query) or ("*" in query):
query = query.replace("*", "%")
a = self.execute("""SELECT DISTINCT {0} FROM "data" WHERE name LIKE %s
OR lastname LIKE %s
or parent1 LIKE %s
or parent2 LIKE %s
ORDER BY lastname;
""".format(self.columns), (query,)*4)
else:
a = self.execute("""SELECT DISTINCT {0} FROM "data" WHERE
name ILIKE %s
OR lastname ILIKE %s
OR parent1 ILIKE %s
OR parent2 ILIKE %s
ORDER BY lastname;
""".format(self.columns), (query,)*4)
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def SearchBarcode(self, query):
"""
Searches for an entry (only in the data table) by barcode.
"""
a = self.execute("""SELECT DISTINCT {0} FROM "data"
INNER JOIN barcode ON "data".id = barcode.ref
WHERE barcode.value = %s;
""".format(self.columns), (query,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def SearchPhone(self, query):
"""
Searches for an entry by entire or last four digits of phone number.
"""
query = str(query)
#Most of this is taken out of my input validator
if len(query) == 4:
#Search by last four:
query = '%' + query
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone LIKE %s
ORDER BY lastname;
""".format(self.columns), (query,))
elif query.isdigit() and len(query) == 10 \
and query[0] not in '01' and query[3] not in '01': #US: '4805551212'
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone = %s
ORDER BY lastname;
""".format(self.columns), (query,))
elif len(query) == 12 and query[3] in '.-/' \
and query[7] in '.-/': #US: '334-555-1212'
trans = Translator(delete='+(-)./ ')
query = trans(query.encode('ascii'))
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone = %s
ORDER BY lastname;
""".format(self.columns), (query,))
elif query[0] == '(' and len(query) == 14: #US: (480) 555-1212
query = query[1:4] + query[6:9] + query[10:14]
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone = %s
ORDER BY lastname;
""".format(self.columns), (query,))
elif query[0] == '+': #International format
trans = Translator(delete='+(-)./ ')
query = trans(query.encode('ascii'))
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone = %s
ORDER BY lastname;
""".format(self.columns), (query,))
elif len(query) == 7:
#Search by last seven:
query = '%' + query
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone LIKE %s
ORDER BY lastname;
""".format(self.columns), (query,))
else:
            self.log.warn("Search key {0} probably isn't a phone number.".format(query))
a = self.execute("""SELECT DISTINCT {0} FROM "data"
WHERE phone = %s
ORDER BY lastname;
""".format(self.columns), (query,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
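    # Accepted phone formats (illustrative summary of the branches above):
    # '1212' (last four), '5551212' (last seven), '4805551212',
    # '480-555-1212', '(480) 555-1212', and '+14805551212'.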
def SearchSecure(self, query):
"""
Searches for a record by the security code assigned at check-in, if applicable.
"""
a = self.execute("""SELECT DISTINCT {0} FROM data
INNER JOIN statistics ON data.id = statistics.person
WHERE statistics.code = %s;
""".format(self.columns), (query.upper(),))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
# === end search functions ===
def GetRecordByID(self, ref):
"""
Returns a single row specified by id.
"""
a = self.execute("SELECT * FROM data WHERE id = %s", (ref,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret[0]
# === barcode functions ===
def GetBarcodes(self, record):
"""
Returns all barcodes listed for a given record ID.
"""
a = self.execute("""SELECT DISTINCT id, value FROM barcode
WHERE ref = %s ORDER BY id;""", (record,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
#~ ret.append(i)
return ret
def AddBarcode(self, record, value):
self.execute("""INSERT INTO barcode(value, ref)
VALUES (%s, %s);""", (value, record))
def RemoveBarcode(self, ref):
self.execute("DELETE FROM barcode WHERE id = %s;", (ref,))
def RemoveAllBarcodes(self, ref):
"""
Deletes all barcodes for a given record (nuke)
"""
self.execute("DELETE FROM barcode WHERE ref = %s;", (ref,))
def UpdateBarcode(self, ref, value):
self.execute("UPDATE barcode SET value = %s WHERE id = %s", (value, ref))
# === end barcode functions ===
# === services functions ===
def GetServices(self):
a = self.execute("SELECT * FROM services ORDER BY id;")
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def AddService(self, name, day=0, time='00:00:00', endTime='23:59:59'):
self.execute("""INSERT INTO services(name, day, time, "endTime")
VALUES (%s, %s, %s, %s);""", (name, day, time, endTime))
def RemoveService(self, ref):
self.execute("DELETE FROM services WHERE id = %s;", (ref,))
def UpdateService(self, ref, name, day, time, endTime):
self.execute("""UPDATE services SET name = %s,
day = %s, time = %s, "endTime" = %s WHERE id = %s;""",
(name, day, time, endTime, ref))
# === end services functions ===
# === activities functions ===
def GetActivities(self):
a = self.execute("SELECT * FROM activities;")
ret = []
for i in a:
if i is None: i = u'—'
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def GetActivity(self, ref):
"""
Converts a reference to the activity table to an explicit string value
(for reading a record's assigned activity with no foreign key support).
"""
a = self.execute("SELECT name FROM activities WHERE id = %s;", (ref,))
if len(a) > 0:
return a[0][0]
else:
return None
def GetActivityById(self, ref):
a = self.execute("SELECT * FROM activities WHERE id = %s;", (ref,))
if len(a) > 0:
return self.dict_factory(a[0])
else:
return None
def AddActivity(self, name, prefix='', securityTag=False, securityMode='simple',
nametag='default', nametagEnable=True,
parentTag='default', parentTagEnable=True, admin=None,
autoExpire = False, notifyExpire = False, newsletter=False,
newsletterLink='', parentURI='', alert=''):
if prefix == '' or prefix is None:
prefix = name[0].upper()
self.execute("""INSERT INTO activities(name, prefix, "securityTag",
"securityMode", "nametagEnable", nametag,
"parentTagEnable", "parentTag", admin, "autoExpire",
"notifyExpire", newsletter, "newsletterLink",
"parentURI", "alertText")
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s);""",
(name, prefix, securityTag, securityMode,
nametagEnable, nametag, parentTagEnable, parentTag,
admin, autoExpire, notifyExpire, newsletter,
newsletterLink, parentURI, alert))
def RemoveActivity(self, ref):
self.execute("DELETE FROM activities WHERE id = %s;", (ref,))
def UpdateActivity(self, ref, name, prefix, securityTag, securityMode,
nametag, nametagEnable, parentTag,
parentTagEnable, admin, autoExpire, notifyExpire,
newsletter, newsletterLink):
if prefix == '' or prefix is None:
prefix = name[0].upper()
self.execute("""UPDATE activities SET name = %s, prefix = %s,
securityTag = %s, securityMode = %s,
nametag = %s, nametagEnable = %s, parentTag = %s,
parentTagEnable = %s, admin = %s, autoExpire = %s,
notifyExpire = %s, newsletter = %s,
newsletterLink = %s WHERE id = %s;""", (name, prefix,
securityTag, securityMode, nametag,
nametagEnable, parentTag, parentTagEnable, admin,
autoExpire, notifyExpire, newsletter,
newsletterLink, ref))
# === end activities functions ==
# === rooms functions ===
def AddRoom(self, name, activity, volunteerMinimum=0, maximumOccupancy=0,
camera='', cameraFPS=0, admin=0, notifoUser=None,
notifoSecret=None, email='', mobile='', carrier=None):
#Check to see that activity exists:
ret = self.execute('SELECT id FROM activities WHERE id = %s;',
(activity,))
if len(ret) == 1:
#Activity exists. Create room.
self.execute("""INSERT INTO rooms(name, activity, "volunteerMinimum",
"maximumOccupancy", camera, "cameraFPS", admin,
"notifoUser", "notifoSecret", email, mobile, carrier)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)""",
(name, activity, volunteerMinimum, maximumOccupancy,
camera, cameraFPS, admin, notifoUser,
notifoSecret, email, mobile, carrier))
return SUCCESS
else:
return CONSTRAINT_FAILED #Foreign key constraint failed
def GetRooms(self):
a = self.execute('SELECT * FROM rooms;')
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def GetRoomByID(self, ref):
"""
Returns a room name specified from a reference (for displaying results).
"""
a = self.execute('SELECT name FROM rooms WHERE id = %s;', (ref,))
if a is not None:
try:
return a[0][0] #Return string
except IndexError:
return ''
else:
return ''
def GetRoom(self, activity):
"""
Returns a list of room dictionaries matching the given activity (by name).
"""
a = self.execute("""SELECT rooms.*
FROM rooms
INNER JOIN activities ON
activities.id = rooms.activity
WHERE activities.name = %s;""",
(activity,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def GetRoomID(self, name):
"""
Returns a room's primary key (id) given a name.
"""
a = self.execute("SELECT id FROM rooms WHERE name = %s;", (name,))
if a:
return a[0][0] #Return the id itself, not the row tuple
else:
return ''
def RemoveRoom(self, ref):
self.execute("DELETE FROM rooms WHERE id = %s;", (ref,))
# === end room functions ===
# === users functions ===
def GetUsers(self):
a = self.execute("""SELECT "user", admin, "notifoUser", "notifoSecret",
"scATR", "leftHanded", ref FROM users;""")
return self.to_dict(a)
def GetUser(self, user):
#Should only return one row
return self.to_dict(self.execute("SELECT * FROM users WHERE \"user\" = %s;", (user,)))[0]
def UserExists(self, user):
a = self.execute("SELECT id FROM \"users\" WHERE \"user\"= %s;", (user,))
self.commit()
if len(a) == 0:
return False
else:
return True
def AddUser(self, user, password, admin=False, notifoUser=None,
notifoSecret=None, scATR=None, leftHanded=False, ref=None):
#Check that the user doesn't exist:
if len(self.execute("SELECT * FROM users WHERE \"user\" = %s;", \
(user,))) != 0:
self.commit()
return USER_EXISTS
salt = os.urandom(29).encode('base_64').strip('\n') #Get a salt
if password == '': #Set a random password
password = os.urandom(8).encode('base_64').strip('\n')
ph = hashlib.sha256(password + salt) #Only the salted digest is stored, never the password
try:
self.execute("""INSERT INTO "users"("user", hash, salt, admin, "notifoUser",
"notifoSecret", "scATR", "leftHanded", ref) VALUES
(%s, %s, %s, %s, %s, %s, %s, %s, %s);""",
(user, ph.hexdigest(), salt, admin, notifoUser,
notifoSecret, scATR, leftHanded, ref))
except psycopg2.IntegrityError:
return USER_EXISTS
finally:
self.commit()
return SUCCESS
def RemoveUser(self, user):
"""
Remove a user from the system by username.
"""
self.execute("DELETE FROM users WHERE \"user\" = %s;", (user,))
def AuthenticateUser(self, user, password):
if self.UserExists(user):
info = self.GetUser(user)
passhash = hashlib.sha256(password + info['salt'])
if info['hash'] == passhash.hexdigest():
return 1
return 0
# == end users functions ==
# === Check-in functions ===
def DoCheckin(self, person, services, expires, code, location, activity, room, cursor=None):
"""
person: id reference of who's being checked-in.
services: a tuple of services to be checked-in for. Pass singleton if only one.
Services should be passed in chronological order!
expires: expiration time, if applicable, of the last service chronologically.
code: secure code, or hashed value on child's tag if not in simple mode.
location: text location to identify kiosk used for check-in.
activity: activity name as string.
room: room name as string.
"""
expiry = None
for idx, service in enumerate(services):
if idx + 1 == len(services): #On the last item (index() breaks with duplicate services)
expiry = expires
#~ try:
self.execute("""INSERT INTO statistics(person, date, service, expires,
checkin, checkout, code, location, activity, room)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);""",
(person, str(datetime.date.today()), service, expiry,
datetime.datetime.now(), None, code, location, activity, room),
cursor)
#~ except sqlite.Error as e:
#~ raise DatabaseError(UNKNOWN_ERROR, e.args[0])
#~ #TODO: Increment count, update last seen date.
count = self.execute("SELECT count FROM data WHERE id = %s;", (person,))
count = int(count[0][0]) + 1
today = datetime.date.today()
self.execute("UPDATE data SET count = %s, \"lastSeen\" = %s WHERE id = %s;",
(count, today, person))
self.commit()
def DoCheckout(self, person):
"""
Marks a record as checked-out.
"""
self.execute("UPDATE statistics SET checkout = %s WHERE person = %s AND \"date\" = date('now');",
(datetime.datetime.now(), person))
self.commit()
# === end checkin functions ===
def GetHistory(self, person):
"""
Returns check-in history.
"""
a = self.execute("SELECT date, service, checkin, checkout, room, location FROM statistics WHERE person = %s;", (person,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
return ret
def GetStatus(self, ref, full=False):
"""
Returns the check-in status for a specified record, according to the
constants defined in taxidi.py. (STATUS_NONE, STATUS_CHECKED_IN, or
STATUS_CHECKED_OUT). If full=True, then the status is returned as part
of a dictionary of the matching statistics row. Only returns values from
today's date.
"""
a = self.execute("SELECT * FROM statistics WHERE person = %s AND checkin > date('now');", (ref,))
ret = []
for i in a:
ret.append(self.dict_factory(i)) #return as a nested dictionary
if len(ret) == 0:
if full:
return { 'status': taxidi.STATUS_NONE, 'code': None }
return taxidi.STATUS_NONE
elif len(ret) == 1:
#Only one check-in. Return what's appropriate:
ret = ret[0]
else:
#Just check the last check-in for now
ret = ret[-1]
if ret['checkin'] == None: #Not checked-in (this shouldn't happen)
if full:
ret['status'] = taxidi.STATUS_NONE
return ret
return taxidi.STATUS_NONE
else:
if ret['checkout'] == None: #Checked-in
if full:
ret['status'] = taxidi.STATUS_CHECKED_IN
return ret
return taxidi.STATUS_CHECKED_IN
else:
if full:
ret['status'] = taxidi.STATUS_CHECKED_OUT
return ret
return taxidi.STATUS_CHECKED_OUT
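# A minimal usage sketch (hypothetical record id 42; `db` is a Database
# handle like the one created in the __main__ demo below):
#
#   if db.GetStatus(42) == taxidi.STATUS_CHECKED_IN:
#       row = db.GetStatus(42, full=True)
#       print row['code'] #Secure code from today's statistics row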
class DatabaseError(Exception):
def __init__(self, code, value=''):
if value == '':
self.error = 'Generic database error.'
if code == EMPTY_RESULT:
self.error = 'Query returned empty result'
elif code == CONSTRAINT_FAILED:
self.error = 'Unique key constraint failed.'
elif code == USER_EXISTS:
self.error = 'The user specified already exists.'
elif code == INVALID_PASSWORD:
self.error = 'Invalid username, password, or authorization specification.'
else:
self.error = str(value).replace('\t', '').capitalize()
self.code = code
def __str__(self):
return str(self.error).replace('\t', '').capitalize()
#~ return repr(self.error)
if __name__ == '__main__':
try:
db = Database('localhost:15432', 'taxidi', 'taxidi', 'lamepass')
except DatabaseError as e:
print e.error
exit()
import pprint
#~ newRecord = db.Register("Zac", "Sturgeon", "(212) 555-5555", "Diana Sturgeon")
#~ db.Delete(newRecord)
#~ print db.execute("SELECT * FROM \"data\";")
#~ pprint.pprint( db.Search('sturgeon') )
#~ db.Update(12, "Zachary", "Sturgeon", "(212) 555-5555", "James Sturgeon")
#Barcode functions:
#~ db.AddBarcode(1, '12345')
#~ db.RemoveBarcode(1)
#~ pprint.pprint(db.Search("ABCD"))
#~ codes = db.GetBarcodes(2)
#~ pprint.pprint(codes)
#~ print
#~ print [ a['value'] for a in codes ]
print db.Search("9989")
#Services:
#~ db.AddService('First Service')
#~ print db.GetServices()
#Activities:
#~ db.AddActivity('Explorers', securityTag=True, securityMode='md5',
#~ nametagEnable=True, parentTagEnable=True,
#~ alert='Nursery alert text goes here.')
#~ db.AddActivity('Outfitters', securityTag=False, securityMode='simple',
#~ nametagEnable=True, parentTagEnable=False,
#~ alert='Nursery alert text goes here.')
#~ db.commit()
#~ print db.GetActivityById(1)
#User functions:
#~ db.RemoveUser('admin')
#~ db.commit()
#~ if db.AddUser('admin', 'password', admin=True) == USER_EXISTS: print "User admin already exists"
#~ db.commit()
#~ pprint.pprint( db.GetUsers() )
#~ print
#~ print (db.AuthenticateUser('admin', 'badpassword') == AUTHORIZED) #False
#~ print (db.AuthenticateUser('baduser', 'pass') == AUTHORIZED) #False
#~ print (db.AuthenticateUser(u'admin', u'password') == AUTHORIZED) #True
#~ print (db.AuthenticateUser('admin', 'password') == AUTHORIZED) #True
#Check-in:
#~ db.DoCheckin(2, ('First Service', 'Second Service', 'Third Service'),
#~ '14:59:59', '5C55', 'Kiosk1', 'Explorers', 'Jungle Room')
#Rooms:
#~ db.AddRoom("Bunnies", 1)
#~ db.AddRoom("Ducks", 1)
#~ db.AddRoom("Kittens", 1)
#~ db.AddRoom("Robins", 1)
#~ db.AddRoom("Squirrels", 1)
#~ db.AddRoom("Puppies", 1)
#~ db.AddRoom("Caterpillars", 1)
#~ db.AddRoom("Butterflies", 1)
#~ db.AddRoom("Turtles", 1)
#~ db.AddRoom("Frogs", 1)
#~ db.AddRoom("Outfitters", 2)
#~ pprint.pprint(db.GetRoomByID(8))
pprint.pprint(db.GetHistory(1))
db.commit()
| gpl-3.0 | 7,439,901,087,887,462,000 | 3,372,829,733,799,053,000 | 40.754852 | 129 | 0.538554 | false |
awkspace/ansible | lib/ansible/modules/identity/ipa/ipa_sudocmdgroup.py | 71 | 6070 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ipa_sudocmdgroup
author: Thomas Krahn (@Nosmoht)
short_description: Manage FreeIPA sudo command group
description:
- Add, modify or delete a sudo command group within an IPA server using the IPA API.
options:
cn:
description:
- Sudo Command Group.
aliases: ['name']
required: true
description:
description:
- Group description.
state:
description: State to ensure
default: present
choices: ['present', 'absent', 'enabled', 'disabled']
sudocmd:
description:
- List of sudo commands to assign to the group.
- If an empty list is passed, all assigned commands will be removed from the group.
- If the option is omitted, sudo commands will not be checked or changed.
extends_documentation_fragment: ipa.documentation
version_added: "2.3"
'''
EXAMPLES = '''
- name: Ensure sudo command group exists
ipa_sudocmdgroup:
name: group01
description: Group of important commands
sudocmd:
- su
ipa_host: ipa.example.com
ipa_user: admin
ipa_pass: topsecret
- name: Ensure sudo command group does not exist
ipa_sudocmdgroup:
name: group01
state: absent
ipa_host: ipa.example.com
ipa_user: admin
ipa_pass: topsecret
'''
RETURN = '''
sudocmdgroup:
description: Sudo command group as returned by IPA API
returned: always
type: dict
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ipa import IPAClient, ipa_argument_spec
from ansible.module_utils._text import to_native
class SudoCmdGroupIPAClient(IPAClient):
def __init__(self, module, host, port, protocol):
super(SudoCmdGroupIPAClient, self).__init__(module, host, port, protocol)
def sudocmdgroup_find(self, name):
return self._post_json(method='sudocmdgroup_find', name=None, item={'all': True, 'cn': name})
def sudocmdgroup_add(self, name, item):
return self._post_json(method='sudocmdgroup_add', name=name, item=item)
def sudocmdgroup_mod(self, name, item):
return self._post_json(method='sudocmdgroup_mod', name=name, item=item)
def sudocmdgroup_del(self, name):
return self._post_json(method='sudocmdgroup_del', name=name)
def sudocmdgroup_add_member(self, name, item):
return self._post_json(method='sudocmdgroup_add_member', name=name, item=item)
def sudocmdgroup_add_member_sudocmd(self, name, item):
return self.sudocmdgroup_add_member(name=name, item={'sudocmd': item})
def sudocmdgroup_remove_member(self, name, item):
return self._post_json(method='sudocmdgroup_remove_member', name=name, item=item)
def sudocmdgroup_remove_member_sudocmd(self, name, item):
return self.sudocmdgroup_remove_member(name=name, item={'sudocmd': item})
def get_sudocmdgroup_dict(description=None):
data = {}
if description is not None:
data['description'] = description
return data
def get_sudocmdgroup_diff(client, ipa_sudocmdgroup, module_sudocmdgroup):
return client.get_diff(ipa_data=ipa_sudocmdgroup, module_data=module_sudocmdgroup)
def ensure(module, client):
name = module.params['cn']
state = module.params['state']
sudocmd = module.params['sudocmd']
module_sudocmdgroup = get_sudocmdgroup_dict(description=module.params['description'])
ipa_sudocmdgroup = client.sudocmdgroup_find(name=name)
changed = False
if state == 'present':
if not ipa_sudocmdgroup:
changed = True
if not module.check_mode:
ipa_sudocmdgroup = client.sudocmdgroup_add(name=name, item=module_sudocmdgroup)
else:
diff = get_sudocmdgroup_diff(client, ipa_sudocmdgroup, module_sudocmdgroup)
if len(diff) > 0:
changed = True
if not module.check_mode:
data = {}
for key in diff:
data[key] = module_sudocmdgroup.get(key)
client.sudocmdgroup_mod(name=name, item=data)
if sudocmd is not None:
changed = client.modify_if_diff(name, ipa_sudocmdgroup.get('member_sudocmd', []), sudocmd,
client.sudocmdgroup_add_member_sudocmd,
client.sudocmdgroup_remove_member_sudocmd)
else:
if ipa_sudocmdgroup:
changed = True
if not module.check_mode:
client.sudocmdgroup_del(name=name)
return changed, client.sudocmdgroup_find(name=name)
def main():
argument_spec = ipa_argument_spec()
argument_spec.update(cn=dict(type='str', required=True, aliases=['name']),
description=dict(type='str'),
state=dict(type='str', default='present', choices=['present', 'absent', 'enabled', 'disabled']),
sudocmd=dict(type='list'))
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
client = SudoCmdGroupIPAClient(module=module,
host=module.params['ipa_host'],
port=module.params['ipa_port'],
protocol=module.params['ipa_prot'])
try:
client.login(username=module.params['ipa_user'],
password=module.params['ipa_pass'])
changed, sudocmdgroup = ensure(module, client)
module.exit_json(changed=changed, sudorule=sudocmdgroup)
except Exception as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
if __name__ == '__main__':
main()
| gpl-3.0 | -6,192,656,142,439,793,000 | 1,074,505,045,565,582,000 | 33.101124 | 121 | 0.636079 | false |
harshilasu/LinkurApp | y/google-cloud-sdk/platform/gsutil/third_party/boto/boto/contrib/ymlmessage.py | 20 | 1879 | # Copyright (c) 2006,2007 Chris Moyer
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
This module was contributed by Chris Moyer. It provides a subclass of the
SQS Message class that supports YAML as the body of the message.
This module requires the yaml module.
"""
from boto.sqs.message import Message
import yaml
class YAMLMessage(Message):
"""
The YAMLMessage class provides a YAML compatible message. Encoding and
decoding are handled automatically.
Access this message data like such:
m.data = [ 1, 2, 3]
m.data[0] # Returns 1
This depends on the PyYAML package
"""
def __init__(self, queue=None, body='', xml_attrs=None):
self.data = None
super(YAMLMessage, self).__init__(queue, body)
def set_body(self, body):
self.data = yaml.load(body)
def get_body(self):
return yaml.dump(self.data)
| gpl-3.0 | -7,058,146,644,904,743,000 | 6,050,379,254,532,201,000 | 35.134615 | 74 | 0.727515 | false |
jaruba/chromium.src | chrome/common/extensions/docs/server2/chroot_file_system.py | 85 | 1890 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import posixpath
from docs_server_utils import StringIdentity
from file_system import FileSystem
from future import Future
class ChrootFileSystem(FileSystem):
'''ChrootFileSystem(fs, path) exposes a FileSystem whose root is |path| inside
|fs|, so ChrootFileSystem(fs, 'hello').Read(['world']) is equivalent to
fs.Read(['hello/world']) with the 'hello' prefix stripped from the result.
'''
def __init__(self, file_system, root):
'''Parameters:
|file_system| The FileSystem instance to transpose paths of.
|root| The path to transpose all Read/Stat calls by.
'''
self._file_system = file_system
self._root = root.strip('/')
def Read(self, paths, skip_not_found=False):
# Maintain reverse mapping so the result can be mapped to the original
# paths given (the result from |file_system| will include |root| in the
# result, which would be wrong).
prefixed_paths = {}
def prefix(path):
prefixed = posixpath.join(self._root, path)
prefixed_paths[prefixed] = path
return prefixed
def next(results):
return dict((prefixed_paths[path], content)
for path, content in results.iteritems())
return self._file_system.Read(tuple(prefix(path) for path in paths),
                              skip_not_found=skip_not_found).Then(next)
def Refresh(self):
return self._file_system.Refresh()
def Stat(self, path):
return self._file_system.Stat(posixpath.join(self._root, path))
def GetIdentity(self):
return StringIdentity(
'%s/%s' % (self._file_system.GetIdentity(), self._root))
def __repr__(self):
return 'ChrootFileSystem(%s, %s)' % (
self._root, repr(self._file_system))
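# A usage sketch (|fs| is any concrete FileSystem; names are illustrative,
# and Future.Get() is assumed to be the usual accessor in this codebase):
#
#   chroot = ChrootFileSystem(fs, 'docs/templates')
#   future = chroot.Read(('intro.html',))  # reads 'docs/templates/intro.html'
#   data = future.Get()                    # result keys omit the prefix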
| bsd-3-clause | -9,189,215,866,449,492,000 | -2,097,993,173,764,753,400 | 34.660377 | 80 | 0.667196 | false |
bdh1011/cupeye | venv/lib/python2.7/site-packages/pip/req/req_requirement.py | 118 | 1245 | from pip._vendor.packaging.version import parse as parse_version
class InstallationCandidate(object):
def __init__(self, project, version, location):
self.project = project
self.version = parse_version(version)
self.location = location
self._key = (self.project, self.version, self.location)
def __repr__(self):
return "<InstallationCandidate({0!r}, {1!r}, {2!r})>".format(
self.project, self.version, self.location,
)
def __hash__(self):
return hash(self._key)
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
if not isinstance(other, InstallationCandidate):
return NotImplemented
return method(self._key, other._key)
| bsd-3-clause | 344,604,365,282,555,400 | -6,267,682,511,717,789,000 | 27.953488 | 69 | 0.585542 | false |
mottosso/mindbender-setup | bin/windows/python36/Lib/http/cookies.py | 6 | 21257 | ####
# Copyright 2000 by Timothy O'Malley <timo@alum.mit.edu>
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software
# and its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Timothy O'Malley not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR
# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
#
####
#
# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp
# by Timothy O'Malley <timo@alum.mit.edu>
#
# Cookie.py is a Python module for the handling of HTTP
# cookies as a Python dictionary. See RFC 2109 for more
# information on cookies.
#
# The original idea to treat Cookies as a dictionary came from
# Dave Mitchell (davem@magnet.com) in 1995, when he released the
# first version of nscookie.py.
#
####
r"""
Here's a sample session to show how to use this module.
At the moment, this is the only documentation.
The Basics
----------
Importing is easy...
>>> from http import cookies
Most of the time you start by creating a cookie.
>>> C = cookies.SimpleCookie()
Once you've created your Cookie, you can add values just as if it were
a dictionary.
>>> C = cookies.SimpleCookie()
>>> C["fig"] = "newton"
>>> C["sugar"] = "wafer"
>>> C.output()
'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer'
Notice that the printable representation of a Cookie is the
appropriate format for a Set-Cookie: header. This is the
default behavior. You can change the header and printed
attributes by using the .output() function
>>> C = cookies.SimpleCookie()
>>> C["rocky"] = "road"
>>> C["rocky"]["path"] = "/cookie"
>>> print(C.output(header="Cookie:"))
Cookie: rocky=road; Path=/cookie
>>> print(C.output(attrs=[], header="Cookie:"))
Cookie: rocky=road
The load() method of a Cookie extracts cookies from a string. In a
CGI script, you would use this method to extract the cookies from the
HTTP_COOKIE environment variable.
>>> C = cookies.SimpleCookie()
>>> C.load("chips=ahoy; vienna=finger")
>>> C.output()
'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger'
The load() method is darn-tootin smart about identifying cookies
within a string. Escaped quotation marks, nested semicolons, and other
such trickeries do not confuse it.
>>> C = cookies.SimpleCookie()
>>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";')
>>> print(C)
Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;"
Each element of the Cookie also supports all of the RFC 2109
Cookie attributes. Here's an example which sets the Path
attribute.
>>> C = cookies.SimpleCookie()
>>> C["oreo"] = "doublestuff"
>>> C["oreo"]["path"] = "/"
>>> print(C)
Set-Cookie: oreo=doublestuff; Path=/
Each dictionary element has a 'value' attribute, which gives you
back the value associated with the key.
>>> C = cookies.SimpleCookie()
>>> C["twix"] = "none for you"
>>> C["twix"].value
'none for you'
The SimpleCookie expects that all values should be standard strings.
Just to be sure, SimpleCookie invokes the str() builtin to convert
the value to a string, when the values are set dictionary-style.
>>> C = cookies.SimpleCookie()
>>> C["number"] = 7
>>> C["string"] = "seven"
>>> C["number"].value
'7'
>>> C["string"].value
'seven'
>>> C.output()
'Set-Cookie: number=7\r\nSet-Cookie: string=seven'
Finis.
"""
#
# Import our required modules
#
import re
import string
__all__ = ["CookieError", "BaseCookie", "SimpleCookie"]
_nulljoin = ''.join
_semispacejoin = '; '.join
_spacejoin = ' '.join
def _warn_deprecated_setter(setter):
import warnings
msg = ('The .%s setter is deprecated. The attribute will be read-only in '
'future releases. Please use the set() method instead.' % setter)
warnings.warn(msg, DeprecationWarning, stacklevel=3)
#
# Define an exception visible to External modules
#
class CookieError(Exception):
pass
# These quoting routines conform to the RFC2109 specification, which in
# turn references the character definitions from RFC2068. They provide
# a two-way quoting algorithm. Any non-text character is translated
# into a 4 character sequence: a forward-slash followed by the
# three-digit octal equivalent of the character. Any '\' or '"' is
# quoted with a preceding '\' slash.
# Because of the way browsers really handle cookies (as opposed to what
# the RFC says) we also encode "," and ";".
#
# These are taken from RFC2068 and RFC2109.
# _LegalChars is the list of chars which don't require "'s
# _Translator hash-table for fast quoting
#
_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:"
_UnescapedChars = _LegalChars + ' ()/<=>?@[]{}'
_Translator = {n: '\\%03o' % n
for n in set(range(256)) - set(map(ord, _UnescapedChars))}
_Translator.update({
ord('"'): '\\"',
ord('\\'): '\\\\',
})
_is_legal_key = re.compile('[%s]+' % re.escape(_LegalChars)).fullmatch
def _quote(str):
r"""Quote a string for use in a cookie header.
If the string does not need to be double-quoted, then just return the
string. Otherwise, surround the string in doublequotes and quote
(with a \) special characters.
"""
if str is None or _is_legal_key(str):
return str
else:
return '"' + str.translate(_Translator) + '"'
_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
_QuotePatt = re.compile(r"[\\].")
def _unquote(str):
# If there aren't any doublequotes,
# then there can't be any special characters. See RFC 2109.
if str is None or len(str) < 2:
return str
if str[0] != '"' or str[-1] != '"':
return str
# We have to assume that we must decode this string.
# Down to work.
# Remove the "s
str = str[1:-1]
# Check for special sequences. Examples:
# \012 --> \n
# \" --> "
#
i = 0
n = len(str)
res = []
while 0 <= i < n:
o_match = _OctalPatt.search(str, i)
q_match = _QuotePatt.search(str, i)
if not o_match and not q_match: # Neither matched
res.append(str[i:])
break
# else:
j = k = -1
if o_match:
j = o_match.start(0)
if q_match:
k = q_match.start(0)
if q_match and (not o_match or k < j): # QuotePatt matched
res.append(str[i:k])
res.append(str[k+1])
i = k + 2
else: # OctalPatt matched
res.append(str[i:j])
res.append(chr(int(str[j+1:j+4], 8)))
i = j + 4
return _nulljoin(res)
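# A round-trip sketch of the quoting rules above (reprs shown as Python
# would print them):
#
#   >>> _quote('fudge')              # only _LegalChars, returned unchanged
#   'fudge'
#   >>> _quote('say "hi"\n')         # '"' and '\n' force quoting/escaping
#   '"say \\"hi\\"\\012"'
#   >>> _unquote('"say \\"hi\\"\\012"')
#   'say "hi"\n'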
# The _getdate() routine is used to set the expiration time in the cookie's HTTP
# header. By default, _getdate() returns the current time in the appropriate
# "expires" format for a Set-Cookie header. The one optional argument is an
# offset from now, in seconds. For example, an offset of -3600 means "one hour
# ago". The offset may be a floating point number.
#
_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
_monthname = [None,
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname):
from time import gmtime, time
now = time()
year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future)
return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % \
(weekdayname[wd], day, monthname[month], year, hh, mm, ss)
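# For example (illustrative only; the actual value tracks the clock):
#
#   _getdate(3600)  ->  'Sat, 12 Feb 2022 13:45:30 GMT'   # one hour from now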
class Morsel(dict):
"""A class to hold ONE (key, value) pair.
In a cookie, each such pair may have several attributes, so this class is
used to keep the attributes associated with the appropriate key,value pair.
This class also includes a coded_value attribute, which is used to hold
the network representation of the value. This is most useful when Python
objects are pickled for network transit.
"""
# RFC 2109 lists these attributes as reserved:
# path comment domain
# max-age secure version
#
# For historical reasons, these attributes are also reserved:
# expires
#
# This is an extension from Microsoft:
# httponly
#
# This dictionary provides a mapping from the lowercase
# variant on the left to the appropriate traditional
# formatting on the right.
_reserved = {
"expires" : "expires",
"path" : "Path",
"comment" : "Comment",
"domain" : "Domain",
"max-age" : "Max-Age",
"secure" : "Secure",
"httponly" : "HttpOnly",
"version" : "Version",
}
_flags = {'secure', 'httponly'}
def __init__(self):
# Set defaults
self._key = self._value = self._coded_value = None
# Set default attributes
for key in self._reserved:
dict.__setitem__(self, key, "")
@property
def key(self):
return self._key
@key.setter
def key(self, key):
_warn_deprecated_setter('key')
self._key = key
@property
def value(self):
return self._value
@value.setter
def value(self, value):
_warn_deprecated_setter('value')
self._value = value
@property
def coded_value(self):
return self._coded_value
@coded_value.setter
def coded_value(self, coded_value):
_warn_deprecated_setter('coded_value')
self._coded_value = coded_value
def __setitem__(self, K, V):
K = K.lower()
if not K in self._reserved:
raise CookieError("Invalid attribute %r" % (K,))
dict.__setitem__(self, K, V)
def setdefault(self, key, val=None):
key = key.lower()
if key not in self._reserved:
raise CookieError("Invalid attribute %r" % (key,))
return dict.setdefault(self, key, val)
def __eq__(self, morsel):
if not isinstance(morsel, Morsel):
return NotImplemented
return (dict.__eq__(self, morsel) and
self._value == morsel._value and
self._key == morsel._key and
self._coded_value == morsel._coded_value)
__ne__ = object.__ne__
def copy(self):
morsel = Morsel()
dict.update(morsel, self)
morsel.__dict__.update(self.__dict__)
return morsel
def update(self, values):
data = {}
for key, val in dict(values).items():
key = key.lower()
if key not in self._reserved:
raise CookieError("Invalid attribute %r" % (key,))
data[key] = val
dict.update(self, data)
def isReservedKey(self, K):
return K.lower() in self._reserved
def set(self, key, val, coded_val, LegalChars=_LegalChars):
if LegalChars != _LegalChars:
import warnings
warnings.warn(
'LegalChars parameter is deprecated, ignored and will '
'be removed in future versions.', DeprecationWarning,
stacklevel=2)
if key.lower() in self._reserved:
raise CookieError('Attempt to set a reserved key %r' % (key,))
if not _is_legal_key(key):
raise CookieError('Illegal key %r' % (key,))
# It's a good key, so save it.
self._key = key
self._value = val
self._coded_value = coded_val
def __getstate__(self):
return {
'key': self._key,
'value': self._value,
'coded_value': self._coded_value,
}
def __setstate__(self, state):
self._key = state['key']
self._value = state['value']
self._coded_value = state['coded_value']
def output(self, attrs=None, header="Set-Cookie:"):
return "%s %s" % (header, self.OutputString(attrs))
__str__ = output
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.OutputString())
def js_output(self, attrs=None):
# Print javascript
return """
<script type="text/javascript">
<!-- begin hiding
document.cookie = \"%s\";
// end hiding -->
</script>
""" % (self.OutputString(attrs).replace('"', r'\"'))
def OutputString(self, attrs=None):
# Build up our result
#
result = []
append = result.append
# First, the key=value pair
append("%s=%s" % (self.key, self.coded_value))
# Now add any defined attributes
if attrs is None:
attrs = self._reserved
items = sorted(self.items())
for key, value in items:
if value == "":
continue
if key not in attrs:
continue
if key == "expires" and isinstance(value, int):
append("%s=%s" % (self._reserved[key], _getdate(value)))
elif key == "max-age" and isinstance(value, int):
append("%s=%d" % (self._reserved[key], value))
elif key in self._flags:
if value:
append(str(self._reserved[key]))
else:
append("%s=%s" % (self._reserved[key], value))
# Return the result
return _semispacejoin(result)
#
# Pattern for finding cookie
#
# This used to be strict parsing based on the RFC2109 and RFC2068
# specifications. I have since discovered that MSIE 3.0x doesn't
# follow the character rules outlined in those specs. As a
# result, the parsing rules here are less strict.
#
_LegalKeyChars = r"\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\="
_LegalValueChars = _LegalKeyChars + r'\[\]'
_CookiePattern = re.compile(r"""
\s* # Optional whitespace at start of cookie
(?P<key> # Start of group 'key'
[""" + _LegalKeyChars + r"""]+? # Any word of at least one letter
) # End of group 'key'
( # Optional group: there may not be a value.
\s*=\s* # Equal Sign
(?P<val> # Start of group 'val'
"(?:[^\\"]|\\.)*" # Any doublequoted string
| # or
\w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr
| # or
[""" + _LegalValueChars + r"""]* # Any word or empty string
) # End of group 'val'
)? # End of optional value group
\s* # Any number of spaces.
(\s+|;|$) # Ending either at space, semicolon, or EOS.
""", re.ASCII | re.VERBOSE) # re.ASCII may be removed if safe.
# At long last, here is the cookie class. Using this class is almost just like
# using a dictionary. See this module's docstring for example usage.
#
class BaseCookie(dict):
"""A container class for a set of Morsels."""
def value_decode(self, val):
"""real_value, coded_value = value_decode(STRING)
Called prior to setting a cookie's value from the network
representation. The VALUE is the value read from HTTP
header.
Override this function to modify the behavior of cookies.
"""
return val, val
def value_encode(self, val):
"""real_value, coded_value = value_encode(VALUE)
Called prior to setting a cookie's value from the dictionary
representation. The VALUE is the value being assigned.
Override this function to modify the behavior of cookies.
"""
strval = str(val)
return strval, strval
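# One possible override (a sketch, not part of the stdlib): a cookie class
# that upper-cases values on the wire while keeping the real value intact.
#
#   class ShoutingCookie(BaseCookie):
#       def value_decode(self, val):
#           return val.lower(), val
#       def value_encode(self, val):
#           strval = str(val)
#           return strval, strval.upper()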
def __init__(self, input=None):
if input:
self.load(input)
def __set(self, key, real_value, coded_value):
"""Private method for setting a cookie's value"""
M = self.get(key, Morsel())
M.set(key, real_value, coded_value)
dict.__setitem__(self, key, M)
def __setitem__(self, key, value):
"""Dictionary style assignment."""
if isinstance(value, Morsel):
# allow assignment of constructed Morsels (e.g. for pickling)
dict.__setitem__(self, key, value)
else:
rval, cval = self.value_encode(value)
self.__set(key, rval, cval)
def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"):
"""Return a string suitable for HTTP."""
result = []
items = sorted(self.items())
for key, value in items:
result.append(value.output(attrs, header))
return sep.join(result)
__str__ = output
def __repr__(self):
l = []
items = sorted(self.items())
for key, value in items:
l.append('%s=%s' % (key, repr(value.value)))
return '<%s: %s>' % (self.__class__.__name__, _spacejoin(l))
def js_output(self, attrs=None):
"""Return a string suitable for JavaScript."""
result = []
items = sorted(self.items())
for key, value in items:
result.append(value.js_output(attrs))
return _nulljoin(result)
def load(self, rawdata):
"""Load cookies from a string (presumably HTTP_COOKIE) or
from a dictionary. Loading cookies from a dictionary 'd'
is equivalent to calling:
map(Cookie.__setitem__, d.keys(), d.values())
"""
if isinstance(rawdata, str):
self.__parse_string(rawdata)
else:
# self.update() wouldn't call our custom __setitem__
for key, value in rawdata.items():
self[key] = value
return
def __parse_string(self, str, patt=_CookiePattern):
i = 0 # Our starting point
n = len(str) # Length of string
parsed_items = [] # Parsed (type, key, value) triples
morsel_seen = False # A key=value pair was previously encountered
TYPE_ATTRIBUTE = 1
TYPE_KEYVALUE = 2
# We first parse the whole cookie string and reject it if it's
# syntactically invalid (this helps avoid some classes of injection
# attacks).
while 0 <= i < n:
# Start looking for a cookie
match = patt.match(str, i)
if not match:
# No more cookies
break
key, value = match.group("key"), match.group("val")
i = match.end(0)
if key[0] == "$":
if not morsel_seen:
# We ignore attributes which pertain to the cookie
# mechanism as a whole, such as "$Version".
# See RFC 2965. (Does anyone care?)
continue
parsed_items.append((TYPE_ATTRIBUTE, key[1:], value))
elif key.lower() in Morsel._reserved:
if not morsel_seen:
# Invalid cookie string
return
if value is None:
if key.lower() in Morsel._flags:
parsed_items.append((TYPE_ATTRIBUTE, key, True))
else:
# Invalid cookie string
return
else:
parsed_items.append((TYPE_ATTRIBUTE, key, _unquote(value)))
elif value is not None:
parsed_items.append((TYPE_KEYVALUE, key, self.value_decode(value)))
morsel_seen = True
else:
# Invalid cookie string
return
# The cookie string is valid, apply it.
M = None # current morsel
for tp, key, value in parsed_items:
if tp == TYPE_ATTRIBUTE:
assert M is not None
M[key] = value
else:
assert tp == TYPE_KEYVALUE
rval, cval = value
self.__set(key, rval, cval)
M = self[key]
class SimpleCookie(BaseCookie):
"""
SimpleCookie supports strings as cookie values. When setting
the value using the dictionary assignment notation, SimpleCookie
calls the builtin str() to convert the value to a string. Values
received from HTTP are kept as strings.
"""
def value_decode(self, val):
return _unquote(val), val
def value_encode(self, val):
strval = str(val)
return strval, _quote(strval)
| mit | -1,445,095,657,012,873,000 | -2,679,938,198,345,087,500 | 32.475591 | 83 | 0.572658 | false |
pcubillos/MCcubed | examples/demo02/preamble.py | 1 | 1385 | #! /usr/bin/env python
# This script generates input files used to run MCMC from the shell prompt.
# Preamble
# --------
# To correctly execute this script, one needs to set the correct paths
# to the source code. The paths are given as if the Python session
# runs from a 'run/' folder at the same level than the repo, as in:
# rootdir/
# |-- MCcubed/
# `-- run/
# Alternatively, edit the paths from this script to adjust to your
# working directory.
# Import the necessary modules:
import sys
import numpy as np
# Import the modules from the MCcubed package:
sys.path.append("../MCcubed/")
import MCcubed as mc3
# Import the modeling function:
sys.path.append("../MCcubed/examples/models/")
from quadratic import quad
# Create a synthetic dataset using a quadratic polynomial curve:
x = np.linspace(0, 10, 1000) # Independent model variable
p0 = [3, -2.4, 0.5] # True-underlying model parameters
y = quad(p0, x) # Noiseless model
uncert = np.sqrt(np.abs(y)) # Data points uncertainty
error = np.random.normal(0, uncert) # Noise for the data
data = y + error # Noisy data set
# data.npz contains the data and uncertainty arrays:
mc3.utils.savebin([data, uncert], 'data.npz')
# indp.npz contains the list of additional arguments for the model:
mc3.utils.savebin([x], 'indp.npz')
| mit | -1,250,510,690,840,464,000 | 3,611,891,509,627,724,300 | 32.780488 | 75 | 0.67509 | false |
bryx-inc/boto | boto/sdb/db/test_db.py | 153 | 5427 | import logging
import time
from datetime import datetime
from boto.sdb.db.model import Model
from boto.sdb.db.property import StringProperty, IntegerProperty, BooleanProperty
from boto.sdb.db.property import DateTimeProperty, FloatProperty, ReferenceProperty
from boto.sdb.db.property import PasswordProperty, ListProperty, MapProperty
from boto.exception import SDBPersistenceError
logging.basicConfig()
log = logging.getLogger('test_db')
log.setLevel(logging.DEBUG)
_objects = {}
#
# This will eventually be moved to the boto.tests module and become a real unit test
# but for now it will live here. It shows examples of each of the Property types in
# use and tests the basic operations.
#
class TestBasic(Model):
name = StringProperty()
size = IntegerProperty()
foo = BooleanProperty()
date = DateTimeProperty()
class TestFloat(Model):
name = StringProperty()
value = FloatProperty()
class TestRequired(Model):
req = StringProperty(required=True, default='foo')
class TestReference(Model):
ref = ReferenceProperty(reference_class=TestBasic, collection_name='refs')
class TestSubClass(TestBasic):
answer = IntegerProperty()
class TestPassword(Model):
password = PasswordProperty()
class TestList(Model):
name = StringProperty()
nums = ListProperty(int)
class TestMap(Model):
name = StringProperty()
map = MapProperty()
class TestListReference(Model):
name = StringProperty()
basics = ListProperty(TestBasic)
class TestAutoNow(Model):
create_date = DateTimeProperty(auto_now_add=True)
modified_date = DateTimeProperty(auto_now=True)
class TestUnique(Model):
name = StringProperty(unique=True)
def test_basic():
global _objects
t = TestBasic()
t.name = 'simple'
t.size = -42
t.foo = True
t.date = datetime.now()
log.debug('saving object')
t.put()
_objects['test_basic_t'] = t
time.sleep(5)
log.debug('now try retrieving it')
tt = TestBasic.get_by_id(t.id)
_objects['test_basic_tt'] = tt
assert tt.id == t.id
l = TestBasic.get_by_id([t.id])
assert len(l) == 1
assert l[0].id == t.id
assert t.size == tt.size
assert t.foo == tt.foo
assert t.name == tt.name
#assert t.date == tt.date
return t
def test_float():
global _objects
t = TestFloat()
t.name = 'float object'
t.value = 98.6
log.debug('saving object')
t.save()
_objects['test_float_t'] = t
time.sleep(5)
log.debug('now try retrieving it')
tt = TestFloat.get_by_id(t.id)
_objects['test_float_tt'] = tt
assert tt.id == t.id
assert tt.name == t.name
assert tt.value == t.value
return t
def test_required():
global _objects
t = TestRequired()
_objects['test_required_t'] = t
t.put()
return t
def test_reference(t=None):
global _objects
if not t:
t = test_basic()
tt = TestReference()
tt.ref = t
tt.put()
time.sleep(10)
tt = TestReference.get_by_id(tt.id)
_objects['test_reference_tt'] = tt
assert tt.ref.id == t.id
for o in t.refs:
log.debug(o)
def test_subclass():
global _objects
t = TestSubClass()
_objects['test_subclass_t'] = t
t.name = 'a subclass'
t.size = -489
t.save()
def test_password():
global _objects
t = TestPassword()
_objects['test_password_t'] = t
t.password = "foo"
t.save()
time.sleep(5)
# Make sure it stored ok
tt = TestPassword.get_by_id(t.id)
_objects['test_password_tt'] = tt
#Testing password equality
assert tt.password == "foo"
#Testing password not stored as string
assert str(tt.password) != "foo"
def test_list():
global _objects
t = TestList()
_objects['test_list_t'] = t
t.name = 'a list of ints'
t.nums = [1, 2, 3, 4, 5]
t.put()
tt = TestList.get_by_id(t.id)
_objects['test_list_tt'] = tt
assert tt.name == t.name
for n in tt.nums:
assert isinstance(n, int)
def test_list_reference():
global _objects
t = TestBasic()
t.put()
_objects['test_list_ref_t'] = t
tt = TestListReference()
tt.name = "foo"
tt.basics = [t]
tt.put()
time.sleep(5)
_objects['test_list_ref_tt'] = tt
ttt = TestListReference.get_by_id(tt.id)
assert ttt.basics[0].id == t.id
def test_unique():
global _objects
t = TestUnique()
name = 'foo' + str(int(time.time()))
t.name = name
t.put()
_objects['test_unique_t'] = t
time.sleep(10)
tt = TestUnique()
_objects['test_unique_tt'] = tt
tt.name = name
try:
tt.put()
assert False
except(SDBPersistenceError):
pass
def test_datetime():
global _objects
t = TestAutoNow()
t.put()
_objects['test_datetime_t'] = t
time.sleep(5)
tt = TestAutoNow.get_by_id(t.id)
assert tt.create_date.timetuple() == t.create_date.timetuple()
def test():
log.info('test_basic')
t1 = test_basic()
log.info('test_required')
test_required()
log.info('test_reference')
test_reference(t1)
log.info('test_subclass')
test_subclass()
log.info('test_password')
test_password()
log.info('test_list')
test_list()
log.info('test_list_reference')
test_list_reference()
log.info("test_datetime")
test_datetime()
log.info('test_unique')
test_unique()
if __name__ == "__main__":
test()
| mit | -4,263,495,503,308,699,000 | 4,592,326,550,558,071,300 | 22.493506 | 84 | 0.62963 | false |
jdinuncio/ansible-modules-extras | cloud/ovirt/ovirt_datacenters.py | 9 | 7296 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import traceback
try:
import ovirtsdk4.types as otypes
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
BaseModule,
check_sdk,
check_params,
create_connection,
equal,
ovirt_full_argument_spec,
search_by_name,
)
ANSIBLE_METADATA = {'status': 'preview',
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: ovirt_datacenters
short_description: Module to manage data centers in oVirt
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
- "Module to manage data centers in oVirt"
options:
name:
description:
- "Name of the the data center to manage."
required: true
state:
description:
- "Should the data center be present or absent"
choices: ['present', 'absent']
default: present
description:
description:
- "Description of the data center."
comment:
description:
- "Comment of the data center."
local:
description:
- "I(True) if the data center should be local, I(False) if should be shared."
- "Default value is set by engine."
compatibility_version:
description:
- "Compatibility version of the data center."
quota_mode:
description:
- "Quota mode of the data center. One of I(disabled), I(audit) or I(enabled)"
choices: ['disabled', 'audit', 'enabled']
mac_pool:
description:
- "MAC pool to be used by this datacenter."
- "IMPORTANT: This option is deprecated in oVirt 4.1. You should
use C(mac_pool) in C(ovirt_clusters) module, as MAC pools are
set per cluster since 4.1."
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Create datacenter
- ovirt_datacenters:
name: mydatacenter
local: True
compatibility_version: 4.0
quota_mode: enabled
# Remove datacenter
- ovirt_datacenters:
state: absent
name: mydatacenter
'''
RETURN = '''
id:
description: "ID of the managed datacenter"
returned: "On success if datacenter is found."
type: str
sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
data_center:
description: "Dictionary of all the datacenter attributes. Datacenter attributes can be found on your oVirt instance
at following url: https://ovirt.example.com/ovirt-engine/api/model#types/datacenter."
returned: "On success if datacenter is found."
'''
class DatacentersModule(BaseModule):
def __get_major(self, full_version):
if full_version is None:
return None
if isinstance(full_version, otypes.Version):
return full_version.major
return int(full_version.split('.')[0])
def __get_minor(self, full_version):
if full_version is None:
return None
if isinstance(full_version, otypes.Version):
return full_version.minor
return int(full_version.split('.')[1])
def _get_mac_pool(self):
mac_pool = None
if self._module.params.get('mac_pool'):
mac_pool = search_by_name(
self._connection.system_service().mac_pools_service(),
self._module.params.get('mac_pool'),
)
return mac_pool
def build_entity(self):
return otypes.DataCenter(
name=self._module.params['name'],
comment=self._module.params['comment'],
description=self._module.params['description'],
mac_pool=otypes.MacPool(
id=getattr(self._get_mac_pool(), 'id', None),
) if self._module.params.get('mac_pool') else None,
quota_mode=otypes.QuotaModeType(
self._module.params['quota_mode']
) if self._module.params['quota_mode'] else None,
local=self._module.params['local'],
version=otypes.Version(
major=self.__get_major(self._module.params['compatibility_version']),
minor=self.__get_minor(self._module.params['compatibility_version']),
) if self._module.params['compatibility_version'] else None,
)
def update_check(self, entity):
minor = self.__get_minor(self._module.params.get('compatibility_version'))
major = self.__get_major(self._module.params.get('compatibility_version'))
return (
equal(getattr(self._get_mac_pool(), 'id', None), getattr(entity.mac_pool, 'id', None)) and
equal(self._module.params.get('comment'), entity.comment) and
equal(self._module.params.get('description'), entity.description) and
equal(self._module.params.get('quota_mode'), str(entity.quota_mode)) and
equal(self._module.params.get('local'), entity.local) and
equal(minor, self.__get_minor(entity.version)) and
equal(major, self.__get_major(entity.version))
)
def main():
argument_spec = ovirt_full_argument_spec(
state=dict(
choices=['present', 'absent'],
default='present',
),
name=dict(default=None, required=True),
description=dict(default=None),
local=dict(type='bool'),
compatibility_version=dict(default=None),
quota_mode=dict(choices=['disabled', 'audit', 'enabled']),
comment=dict(default=None),
mac_pool=dict(default=None),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
check_sdk(module)
check_params(module)
try:
connection = create_connection(module.params.pop('auth'))
data_centers_service = connection.system_service().data_centers_service()
clusters_module = DatacentersModule(
connection=connection,
module=module,
service=data_centers_service,
)
state = module.params['state']
if state == 'present':
ret = clusters_module.create()
elif state == 'absent':
ret = clusters_module.remove()
module.exit_json(**ret)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
connection.close(logout=False)
if __name__ == "__main__":
main()
| gpl-3.0 | 1,421,603,337,471,743,700 | 2,647,295,726,242,407,400 | 32.013575 | 120 | 0.620614 | false |
KimNorgaard/ansible-modules-extras | packaging/os/pkg5.py | 75 | 4862 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2014 Peter Oliver <ansible@mavit.org.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: pkg5
author: "Peter Oliver (@mavit)"
short_description: Manages packages with the Solaris 11 Image Packaging System
version_added: 1.9
description:
- IPS packages are the native packages in Solaris 11 and higher.
notes:
- The naming of IPS packages is explained at U(http://www.oracle.com/technetwork/articles/servers-storage-admin/ips-package-versioning-2232906.html).
options:
name:
description:
- An FRMI of the package(s) to be installed/removed/updated.
- Multiple packages may be specified, separated by C(,).
required: true
state:
description:
- Whether to install (I(present), I(latest)), or remove (I(absent)) a
package.
required: false
default: present
choices: [ present, latest, absent ]
accept_licenses:
description:
- Accept any licences.
required: false
default: false
choices: [ true, false ]
aliases: [ accept_licences, accept ]
'''
EXAMPLES = '''
# Install Vim:
- pkg5: name=editor/vim
# Remove finger daemon:
- pkg5: name=service/network/finger state=absent
# Install several packages at once:
- pkg5:
name:
- /file/gnu-findutils
- /text/gnu-grep
'''
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True, type='list'),
state=dict(
default='present',
choices=[
'present',
'installed',
'latest',
'absent',
'uninstalled',
'removed',
]
),
accept_licenses=dict(
type='bool',
default=False,
aliases=['accept_licences', 'accept'],
),
)
)
params = module.params
packages = []
# pkg(5) FRMIs include a comma before the release number, but
# AnsibleModule will have split this into multiple items for us.
# Try to spot where this has happened and fix it.
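# For example, the single FRMI 'editor/vim@7.4.1,5.11-0.175' (illustrative)
# arrives here split into ['editor/vim@7.4.1', '5.11-0.175']; the loop
# below re-joins it into one package spec.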
for fragment in params['name']:
if (
re.search('^\d+(?:\.\d+)*', fragment)
and packages and re.search('@[^,]*$', packages[-1])
):
packages[-1] += ',' + fragment
else:
packages.append(fragment)
if params['state'] in ['present', 'installed']:
ensure(module, 'present', packages, params)
elif params['state'] in ['latest']:
ensure(module, 'latest', packages, params)
elif params['state'] in ['absent', 'uninstalled', 'removed']:
ensure(module, 'absent', packages, params)
def ensure(module, state, packages, params):
response = {
'results': [],
'msg': '',
}
behaviour = {
'present': {
'filter': lambda p: not is_installed(module, p),
'subcommand': 'install',
},
'latest': {
'filter': lambda p: not is_latest(module, p),
'subcommand': 'install',
},
'absent': {
'filter': lambda p: is_installed(module, p),
'subcommand': 'uninstall',
},
}
if params['accept_licenses']:
accept_licenses = ['--accept']
else:
accept_licenses = []
to_modify = filter(behaviour[state]['filter'], packages)
if to_modify:
rc, out, err = module.run_command(
[
'pkg', behaviour[state]['subcommand']
]
+ accept_licenses
+ [
'-q', '--'
] + to_modify
)
response['rc'] = rc
response['results'].append(out)
response['msg'] += err
response['changed'] = True
if rc != 0:
module.fail_json(**response)
module.exit_json(**response)
def is_installed(module, package):
rc, out, err = module.run_command(['pkg', 'list', '--', package])
return not bool(int(rc))
def is_latest(module, package):
rc, out, err = module.run_command(['pkg', 'list', '-u', '--', package])
return bool(int(rc))
from ansible.module_utils.basic import *
main()
| gpl-3.0 | 7,025,542,888,701,372,000 | -7,633,072,287,106,749,000 | 27.940476 | 151 | 0.573015 | false |
andmarios/ansible-modules-core | system/user.py | 7 | 72948 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Stephen Fromm <sfromm@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: user
author: "Stephen Fromm (@sfromm)"
version_added: "0.2"
short_description: Manage user accounts
requirements: [ useradd, userdel, usermod ]
description:
- Manage user accounts and user attributes.
options:
name:
required: true
aliases: [ "user" ]
description:
- Name of the user to create, remove or modify.
comment:
required: false
description:
- Optionally sets the description (aka I(GECOS)) of user account.
uid:
required: false
description:
- Optionally sets the I(UID) of the user.
non_unique:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
      - Optionally, when used with the -u option, this allows the user ID to
        be changed to a non-unique value.
version_added: "1.1"
seuser:
required: false
description:
- Optionally sets the seuser type (user_u) on selinux enabled systems.
version_added: "2.1"
group:
required: false
description:
- Optionally sets the user's primary group (takes a group name).
groups:
required: false
description:
- Puts the user in this comma-delimited list of groups. When set to
the empty string ('groups='), the user is removed from all groups
except the primary group.
append:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- If C(yes), will only add groups, not set them to just the list
in I(groups).
shell:
required: false
description:
- Optionally set the user's shell.
home:
required: false
description:
- Optionally set the user's home directory.
skeleton:
required: false
description:
- Optionally set a home skeleton directory. Requires createhome option!
password:
required: false
description:
- Optionally set the user's password to this crypted value. See
the user example in the github examples directory for what this looks
like in a playbook. See U(http://docs.ansible.com/ansible/faq.html#how-do-i-generate-crypted-passwords-for-the-user-module)
for details on various ways to generate these password values.
        Note that on Darwin systems this value has to be cleartext.
        Beware of security issues.
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the account should exist or not, taking action if the state is different from what is stated.
createhome:
required: false
default: "yes"
choices: [ "yes", "no" ]
description:
- Unless set to C(no), a home directory will be made for the user
when the account is created or if the home directory does not
exist.
move_home:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- If set to C(yes) when used with C(home=), attempt to move the
user's home directory to the specified directory if it isn't there
already.
system:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- When creating an account, setting this to C(yes) makes the user a
system account. This setting cannot be changed on existing users.
force:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- When used with C(state=absent), behavior is as with
C(userdel --force).
login_class:
required: false
description:
- Optionally sets the user's login class for FreeBSD, OpenBSD and NetBSD systems.
remove:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- When used with C(state=absent), behavior is as with
C(userdel --remove).
generate_ssh_key:
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "0.9"
description:
- Whether to generate a SSH key for the user in question.
This will B(not) overwrite an existing SSH key.
ssh_key_bits:
required: false
default: 2048
version_added: "0.9"
description:
- Optionally specify number of bits in SSH key to create.
ssh_key_type:
required: false
default: rsa
version_added: "0.9"
description:
- Optionally specify the type of SSH key to generate.
Available SSH key types will depend on implementation
present on target host.
ssh_key_file:
required: false
default: .ssh/id_rsa
version_added: "0.9"
description:
- Optionally specify the SSH key filename. If this is a relative
filename then it will be relative to the user's home directory.
ssh_key_comment:
required: false
default: ansible-generated on $HOSTNAME
version_added: "0.9"
description:
- Optionally define the comment for the SSH key.
ssh_key_passphrase:
required: false
version_added: "0.9"
description:
- Set a passphrase for the SSH key. If no
passphrase is provided, the SSH key will default to
having no passphrase.
update_password:
required: false
default: always
choices: ['always', 'on_create']
version_added: "1.3"
description:
- C(always) will update passwords if they differ. C(on_create) will only set the password for newly created users.
expires:
version_added: "1.9"
required: false
default: "None"
description:
      - An expiry time for the user in epoch seconds; it will be ignored on platforms that do not support this.
Currently supported on Linux and FreeBSD.
'''
EXAMPLES = '''
# Add the user 'johnd' with a specific uid and a primary group of 'admin'
- user: name=johnd comment="John Doe" uid=1040 group=admin
# Add the user 'james' with a bash shell, appending the group 'admins' and 'developers' to the user's groups
- user: name=james shell=/bin/bash groups=admins,developers append=yes
# Remove the user 'johnd'
- user: name=johnd state=absent remove=yes
# Create a 2048-bit SSH key for user jsmith in ~jsmith/.ssh/id_rsa
- user: name=jsmith generate_ssh_key=yes ssh_key_bits=2048 ssh_key_file=.ssh/id_rsa
# Add a consultant whose account you want to expire
- user: name=james18 shell=/bin/zsh groups=developers expires=1422403387
'''
import os
import pwd
import grp
import platform
import socket
import time
try:
import spwd
HAVE_SPWD=True
except:
HAVE_SPWD=False
class User(object):
"""
This is a generic User manipulation class that is subclassed
based on platform.
A subclass may wish to override the following action methods:-
- create_user()
- remove_user()
- modify_user()
- ssh_key_gen()
- ssh_key_fingerprint()
- user_exists()
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None
SHADOWFILE = '/etc/shadow'
DATE_FORMAT = '%Y-%m-%d'
def __new__(cls, *args, **kwargs):
return load_platform_subclass(User, args, kwargs)
def __init__(self, module):
self.module = module
self.state = module.params['state']
self.name = module.params['name']
self.uid = module.params['uid']
self.non_unique = module.params['non_unique']
self.seuser = module.params['seuser']
self.group = module.params['group']
self.groups = module.params['groups']
self.comment = module.params['comment']
self.shell = module.params['shell']
self.password = module.params['password']
self.force = module.params['force']
self.remove = module.params['remove']
self.createhome = module.params['createhome']
self.move_home = module.params['move_home']
self.skeleton = module.params['skeleton']
self.system = module.params['system']
self.login_class = module.params['login_class']
self.append = module.params['append']
self.sshkeygen = module.params['generate_ssh_key']
self.ssh_bits = module.params['ssh_key_bits']
self.ssh_type = module.params['ssh_key_type']
self.ssh_comment = module.params['ssh_key_comment']
self.ssh_passphrase = module.params['ssh_key_passphrase']
self.update_password = module.params['update_password']
self.home = None
self.expires = None
if module.params['home'] is not None:
self.home = os.path.expanduser(module.params['home'])
if module.params['expires']:
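            # 'expires' arrives as seconds since the epoch; keep it as a UTC
            # struct_time so the platform subclasses can strftime/mktime it.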
try:
self.expires = time.gmtime(module.params['expires'])
            except Exception, e:
                module.fail_json(msg="Invalid expires time %s: %s" % (self.expires, str(e)))
if module.params['ssh_key_file'] is not None:
self.ssh_file = module.params['ssh_key_file']
else:
self.ssh_file = os.path.join('.ssh', 'id_%s' % self.ssh_type)
def execute_command(self, cmd, use_unsafe_shell=False, data=None):
return self.module.run_command(cmd, use_unsafe_shell=use_unsafe_shell, data=data)
def remove_user_userdel(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.force:
cmd.append('-f')
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def create_user_useradd(self, command_name='useradd'):
cmd = [self.module.get_bin_path(command_name, True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.seuser is not None:
cmd.append('-Z')
cmd.append(self.seuser)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
elif self.group_exists(self.name):
# use the -N option (no user group) if a group already
# exists with the same name as the user to prevent
# errors from useradd trying to create a group when
# USERGROUPS_ENAB is set in /etc/login.defs.
if os.path.exists('/etc/redhat-release'):
dist = platform.dist()
major_release = int(dist[1].split('.')[0])
if major_release <= 5:
cmd.append('-n')
else:
cmd.append('-N')
else:
cmd.append('-N')
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.expires:
cmd.append('--expiredate')
cmd.append(time.strftime(self.DATE_FORMAT, self.expires))
if self.password is not None:
cmd.append('-p')
cmd.append(self.password)
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
else:
cmd.append('-M')
if self.system:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def _check_usermod_append(self):
# check if this version of usermod can append groups
usermod_path = self.module.get_bin_path('usermod', True)
# for some reason, usermod --help cannot be used by non root
# on RH/Fedora, due to lack of execute bit for others
if not os.access(usermod_path, os.X_OK):
return False
cmd = [usermod_path]
cmd.append('--help')
rc, data1, data2 = self.execute_command(cmd)
helpout = data1 + data2
# check if --append exists
lines = helpout.split('\n')
for line in lines:
if line.strip().startswith('-a, --append'):
return True
return False
def modify_user_usermod(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
has_append = self._check_usermod_append()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set(remove_existing=False)
group_diff = set(current_groups).symmetric_difference(groups)
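                # group_diff holds groups present on exactly one side: those
                # still to be added and those that would be dropped.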
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
if has_append:
cmd.append('-a')
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
if self.append and not has_append:
cmd.append('-A')
cmd.append(','.join(group_diff))
else:
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
cmd.append('-d')
cmd.append(self.home)
if self.move_home:
cmd.append('-m')
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.expires:
cmd.append('--expiredate')
cmd.append(time.strftime(self.DATE_FORMAT, self.expires))
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd.append('-p')
cmd.append(self.password)
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
elif self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
def group_exists(self,group):
try:
# Try group as a gid first
grp.getgrgid(int(group))
return True
except (ValueError, KeyError):
try:
grp.getgrnam(group)
return True
except KeyError:
return False
def group_info(self, group):
if not self.group_exists(group):
return False
try:
# Try group as a gid first
return list(grp.getgrgid(int(group)))
except (ValueError, KeyError):
return list(grp.getgrnam(group))
def get_groups_set(self, remove_existing=True):
if self.groups is None:
return None
info = self.user_info()
groups = set(filter(None, self.groups.split(',')))
for g in set(groups):
if not self.group_exists(g):
self.module.fail_json(msg="Group %s does not exist" % (g))
if info and remove_existing and self.group_info(g)[2] == info[3]:
groups.remove(g)
return groups
def user_group_membership(self):
groups = []
info = self.get_pwd_info()
for group in grp.getgrall():
if self.name in group.gr_mem and not info[3] == group.gr_gid:
groups.append(group[0])
return groups
def user_exists(self):
try:
if pwd.getpwnam(self.name):
return True
except KeyError:
return False
def get_pwd_info(self):
if not self.user_exists():
return False
return list(pwd.getpwnam(self.name))
def user_info(self):
if not self.user_exists():
return False
info = self.get_pwd_info()
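        # A placeholder passwd field ('x', '*' or empty) means the real hash
        # lives in the shadow database, so look it up there.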
if len(info[1]) == 1 or len(info[1]) == 0:
info[1] = self.user_password()
return info
def user_password(self):
passwd = ''
if HAVE_SPWD:
try:
passwd = spwd.getspnam(self.name)[1]
except KeyError:
return passwd
if not self.user_exists():
return passwd
elif self.SHADOWFILE:
# Read shadow file for user's encrypted password string
if os.path.exists(self.SHADOWFILE) and os.access(self.SHADOWFILE, os.R_OK):
for line in open(self.SHADOWFILE).readlines():
if line.startswith('%s:' % self.name):
passwd = line.split(':')[1]
return passwd
def get_ssh_key_path(self):
info = self.user_info()
if os.path.isabs(self.ssh_file):
ssh_key_file = self.ssh_file
else:
ssh_key_file = os.path.join(info[5], self.ssh_file)
return ssh_key_file
def ssh_key_gen(self):
info = self.user_info()
if not os.path.exists(info[5]) and not self.module.check_mode:
return (1, '', 'User %s home directory does not exist' % self.name)
ssh_key_file = self.get_ssh_key_path()
ssh_dir = os.path.dirname(ssh_key_file)
if not os.path.exists(ssh_dir):
if self.module.check_mode:
return (0, '', '')
try:
os.mkdir(ssh_dir, 0700)
os.chown(ssh_dir, info[2], info[3])
except OSError, e:
return (1, '', 'Failed to create %s: %s' % (ssh_dir, str(e)))
if os.path.exists(ssh_key_file):
return (None, 'Key already exists', '')
if self.module.check_mode:
return (0, '', '')
cmd = [self.module.get_bin_path('ssh-keygen', True)]
cmd.append('-t')
cmd.append(self.ssh_type)
cmd.append('-b')
cmd.append(self.ssh_bits)
cmd.append('-C')
cmd.append(self.ssh_comment)
cmd.append('-f')
cmd.append(ssh_key_file)
cmd.append('-N')
if self.ssh_passphrase is not None:
cmd.append(self.ssh_passphrase)
else:
cmd.append('')
(rc, out, err) = self.execute_command(cmd)
if rc == 0:
# If the keys were successfully created, we should be able
# to tweak ownership.
os.chown(ssh_key_file, info[2], info[3])
os.chown('%s.pub' % ssh_key_file, info[2], info[3])
return (rc, out, err)
def ssh_key_fingerprint(self):
ssh_key_file = self.get_ssh_key_path()
if not os.path.exists(ssh_key_file):
return (1, 'SSH Key file %s does not exist' % ssh_key_file, '')
cmd = [ self.module.get_bin_path('ssh-keygen', True) ]
cmd.append('-l')
cmd.append('-f')
cmd.append(ssh_key_file)
return self.execute_command(cmd)
def get_ssh_public_key(self):
ssh_public_key_file = '%s.pub' % self.get_ssh_key_path()
try:
f = open(ssh_public_key_file)
ssh_public_key = f.read().strip()
f.close()
except IOError:
return None
return ssh_public_key
def create_user(self):
# by default we use the create_user_useradd method
return self.create_user_useradd()
def remove_user(self):
# by default we use the remove_user_userdel method
return self.remove_user_userdel()
def modify_user(self):
# by default we use the modify_user_usermod method
return self.modify_user_usermod()
def create_homedir(self, path):
if not os.path.exists(path):
if self.skeleton is not None:
skeleton = self.skeleton
else:
skeleton = '/etc/skel'
if os.path.exists(skeleton):
try:
shutil.copytree(skeleton, path, symlinks=True)
except OSError, e:
self.module.exit_json(failed=True, msg="%s" % e)
else:
try:
os.makedirs(path)
except OSError, e:
self.module.exit_json(failed=True, msg="%s" % e)
def chown_homedir(self, uid, gid, path):
try:
os.chown(path, uid, gid)
for root, dirs, files in os.walk(path):
for d in dirs:
                    os.chown(os.path.join(root, d), uid, gid)
for f in files:
os.chown(os.path.join(root, f), uid, gid)
except OSError, e:
self.module.exit_json(failed=True, msg="%s" % e)
# ===========================================
class FreeBsdUser(User):
"""
This is a FreeBSD User manipulation class - it uses the pw command
to manipulate the user database, followed by the chpass command
to change the password.
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'FreeBSD'
distribution = None
SHADOWFILE = '/etc/master.passwd'
def remove_user(self):
cmd = [
self.module.get_bin_path('pw', True),
'userdel',
'-n',
self.name
]
if self.remove:
cmd.append('-r')
return self.execute_command(cmd)
def create_user(self):
cmd = [
self.module.get_bin_path('pw', True),
'useradd',
'-n',
self.name,
]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.expires:
            days = (time.mktime(self.expires) - time.time()) / 86400
cmd.append('-e')
cmd.append(str(int(days)))
        # system cannot be handled currently - should we error if it's requested?
# create the user
(rc, out, err) = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
# we have to set the password in a second command
if self.password is not None:
cmd = [
self.module.get_bin_path('chpass', True),
'-p',
self.password,
self.name
]
return self.execute_command(cmd)
return (rc, out, err)
def modify_user(self):
cmd = [
self.module.get_bin_path('pw', True),
'usermod',
'-n',
self.name
]
cmd_len = len(cmd)
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
# find current login class
user_login_class = None
if os.path.exists(self.SHADOWFILE) and os.access(self.SHADOWFILE, os.R_OK):
for line in open(self.SHADOWFILE).readlines():
if line.startswith('%s:' % self.name):
user_login_class = line.split(':')[4]
            # act only if login_class changed
if self.login_class != user_login_class:
cmd.append('-L')
cmd.append(self.login_class)
if self.groups is not None:
current_groups = self.user_group_membership()
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
groups_need_mod = False
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
new_groups = groups
if self.append:
new_groups = groups | set(current_groups)
cmd.append(','.join(new_groups))
if self.expires:
            days = (time.mktime(self.expires) - time.time()) / 86400
cmd.append('-e')
cmd.append(str(int(days)))
# modify the user if cmd will do anything
if cmd_len != len(cmd):
(rc, out, err) = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
else:
(rc, out, err) = (None, '', '')
# we have to set the password in a second command
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd = [
self.module.get_bin_path('chpass', True),
'-p',
self.password,
self.name
]
return self.execute_command(cmd)
return (rc, out, err)
# ===========================================
class OpenBSDUser(User):
"""
    This is an OpenBSD User manipulation class.
Main differences are that OpenBSD:-
- has no concept of "system" account.
- has no force delete user
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'OpenBSD'
distribution = None
SHADOWFILE = '/etc/master.passwd'
def create_user(self):
cmd = [self.module.get_bin_path('useradd', True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.password is not None and self.password != '*':
cmd.append('-p')
cmd.append(self.password)
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
cmd.append(self.name)
return self.execute_command(cmd)
def remove_user_userdel(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def modify_user(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups_option = '-G'
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_option = '-S'
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append(groups_option)
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
# find current login class
user_login_class = None
userinfo_cmd = [self.module.get_bin_path('userinfo', True), self.name]
(rc, out, err) = self.execute_command(userinfo_cmd)
for line in out.splitlines():
tokens = line.split()
if tokens[0] == 'class' and len(tokens) == 2:
user_login_class = tokens[1]
            # act only if login_class changed
if self.login_class != user_login_class:
cmd.append('-L')
cmd.append(self.login_class)
if self.update_password == 'always' and self.password is not None \
and self.password != '*' and info[1] != self.password:
cmd.append('-p')
cmd.append(self.password)
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
elif self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
class NetBSDUser(User):
"""
This is a NetBSD User manipulation class.
Main differences are that NetBSD:-
- has no concept of "system" account.
- has no force delete user
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'NetBSD'
distribution = None
SHADOWFILE = '/etc/master.passwd'
def create_user(self):
cmd = [self.module.get_bin_path('useradd', True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
if len(groups) > 16:
self.module.fail_json(msg="Too many groups (%d) NetBSD allows for 16 max." % len(groups))
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.password is not None:
cmd.append('-p')
cmd.append(self.password)
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
cmd.append(self.name)
return self.execute_command(cmd)
def remove_user_userdel(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def modify_user(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups = set(current_groups).union(groups)
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
if len(groups) > 16:
self.module.fail_json(msg="Too many groups (%d) NetBSD allows for 16 max." % len(groups))
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd.append('-p')
cmd.append(self.password)
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
elif self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
class SunOS(User):
"""
This is a SunOS User manipulation class - The main difference between
this class and the generic user class is that Solaris-type distros
don't support the concept of a "system" account and we need to
edit the /etc/shadow file manually to set a password. (Ugh)
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'SunOS'
distribution = None
SHADOWFILE = '/etc/shadow'
def remove_user(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def create_user(self):
cmd = [self.module.get_bin_path('useradd', True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
cmd.append(self.name)
if self.module.check_mode:
return (0, '', '')
else:
(rc, out, err) = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
# we have to set the password by editing the /etc/shadow file
if self.password is not None:
try:
lines = []
for line in open(self.SHADOWFILE, 'rb').readlines():
fields = line.strip().split(':')
if not fields[0] == self.name:
lines.append(line)
continue
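                    # fields[1] is the encrypted password; fields[2]
                    # (sp_lstchg) is the last-change date in days since the
                    # epoch.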
fields[1] = self.password
fields[2] = str(int(time.time() / 86400))
line = ':'.join(fields)
lines.append('%s\n' % line)
open(self.SHADOWFILE, 'w+').writelines(lines)
except Exception, err:
self.module.fail_json(msg="failed to update users password: %s" % str(err))
return (rc, out, err)
def modify_user_usermod(self):
cmd = [self.module.get_bin_path('usermod', True)]
cmd_len = len(cmd)
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
groups_need_mod = False
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
new_groups = groups
if self.append:
new_groups.update(current_groups)
cmd.append(','.join(new_groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
# modify the user if cmd will do anything
if cmd_len != len(cmd):
(rc, out, err) = (0, '', '')
if not self.module.check_mode:
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
else:
(rc, out, err) = (None, '', '')
# we have to set the password by editing the /etc/shadow file
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
(rc, out, err) = (0, '', '')
if not self.module.check_mode:
try:
lines = []
for line in open(self.SHADOWFILE, 'rb').readlines():
fields = line.strip().split(':')
if not fields[0] == self.name:
lines.append(line)
continue
fields[1] = self.password
fields[2] = str(int(time.time() / 86400))
line = ':'.join(fields)
lines.append('%s\n' % line)
open(self.SHADOWFILE, 'w+').writelines(lines)
rc = 0
except Exception, err:
self.module.fail_json(msg="failed to update users password: %s" % str(err))
return (rc, out, err)
# ===========================================
class DarwinUser(User):
"""
This is a Darwin Mac OS X User manipulation class.
Main differences are that Darwin:-
- Handles accounts in a database managed by dscl(1)
- Has no useradd/groupadd
- Does not create home directories
- User password must be cleartext
- UID must be given
    - System users must be under 500
This overrides the following methods from the generic class:-
- user_exists()
- create_user()
- remove_user()
- modify_user()
"""
platform = 'Darwin'
distribution = None
SHADOWFILE = None
dscl_directory = '.'
fields = [
('comment', 'RealName'),
('home', 'NFSHomeDirectory'),
('shell', 'UserShell'),
('uid', 'UniqueID'),
('group', 'PrimaryGroupID'),
]
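    # Pairs of (module attribute, dscl(1) property) that create_user() and
    # modify_user() keep in sync with Directory Services.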
def _get_dscl(self):
return [ self.module.get_bin_path('dscl', True), self.dscl_directory ]
def _list_user_groups(self):
cmd = self._get_dscl()
cmd += [ '-search', '/Groups', 'GroupMembership', self.name ]
(rc, out, err) = self.execute_command(cmd)
groups = []
for line in out.splitlines():
if line.startswith(' ') or line.startswith(')'):
continue
groups.append(line.split()[0])
return groups
def _get_user_property(self, property):
        '''Return user PROPERTY as given by dscl(1) read, or None if not found.'''
cmd = self._get_dscl()
cmd += [ '-read', '/Users/%s' % self.name, property ]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
return None
# from dscl(1)
# if property contains embedded spaces, the list will instead be
# displayed one entry per line, starting on the line after the key.
lines = out.splitlines()
#sys.stderr.write('*** |%s| %s -> %s\n' % (property, out, lines))
        if len(lines) == 1:
            return lines[0].split(': ')[1]
        elif len(lines) > 2:
            return '\n'.join([ lines[1].strip() ] + lines[2:])
        elif len(lines) == 2:
            return lines[1].strip()
        else:
            return None
def _get_next_uid(self):
'''Return the next available uid'''
cmd = self._get_dscl()
cmd += ['-list', '/Users', 'UniqueID']
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg="Unable to get the next available uid",
rc=rc,
out=out,
err=err
)
max_uid = 0
for line in out.splitlines():
if max_uid < int(line.split()[1]):
max_uid = int(line.split()[1])
return max_uid + 1
def _change_user_password(self):
'''Change password for SELF.NAME against SELF.PASSWORD.
        Please note that password must be cleartext.
'''
# some documentation on how is stored passwords on OSX:
# http://blog.lostpassword.com/2012/07/cracking-mac-os-x-lion-accounts-passwords/
# http://null-byte.wonderhowto.com/how-to/hack-mac-os-x-lion-passwords-0130036/
# http://pastebin.com/RYqxi7Ca
# on OSX 10.8+ hash is SALTED-SHA512-PBKDF2
# https://pythonhosted.org/passlib/lib/passlib.hash.pbkdf2_digest.html
# https://gist.github.com/nueh/8252572
cmd = self._get_dscl()
if self.password:
cmd += [ '-passwd', '/Users/%s' % self.name, self.password]
else:
cmd += [ '-create', '/Users/%s' % self.name, 'Password', '*']
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Error when changing password',
err=err, out=out, rc=rc)
return (rc, out, err)
def _make_group_numerical(self):
        '''Convert SELF.GROUP to its numerical value as a string, suitable for dscl.'''
if self.group is None:
self.group = 'nogroup'
try:
self.group = grp.getgrnam(self.group).gr_gid
except KeyError:
self.module.fail_json(msg='Group "%s" not found. Try to create it first using "group" module.' % self.group)
# We need to pass a string to dscl
self.group = str(self.group)
def __modify_group(self, group, action):
'''Add or remove SELF.NAME to or from GROUP depending on ACTION.
        ACTION can be 'add' or 'remove'; otherwise 'remove' is assumed. '''
if action == 'add':
option = '-a'
else:
option = '-d'
cmd = [ 'dseditgroup', '-o', 'edit', option, self.name,
'-t', 'user', group ]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot %s user "%s" to group "%s".'
% (action, self.name, group),
err=err, out=out, rc=rc)
return (rc, out, err)
def _modify_group(self):
        '''Synchronise SELF.NAME's supplementary group memberships with
        SELF.GROUPS, adding and removing groups as needed. '''
rc = 0
out = ''
err = ''
changed = False
current = set(self._list_user_groups())
if self.groups is not None:
target = set(self.groups.split(','))
else:
target = set([])
for remove in current - target:
            (_rc, _out, _err) = self.__modify_group(remove, 'delete')
            rc += _rc
            out += _out
            err += _err
            changed = True
        for add in target - current:
            (_rc, _out, _err) = self.__modify_group(add, 'add')
            rc += _rc
            out += _out
            err += _err
            changed = True
        return (rc, out, err, changed)
def _update_system_user(self):
        '''Hide or show user on the login window according to SELF.SYSTEM.
        Return 0 if a change has been made, None otherwise.'''
plist_file = '/Library/Preferences/com.apple.loginwindow.plist'
# http://support.apple.com/kb/HT5017?viewlocale=en_US
cmd = [ 'defaults', 'read', plist_file, 'HiddenUsersList' ]
(rc, out, err) = self.execute_command(cmd)
# returned value is
# (
# "_userA",
# "_UserB",
# userc
# )
hidden_users = []
for x in out.splitlines()[1:-1]:
try:
x = x.split('"')[1]
except IndexError:
x = x.strip()
hidden_users.append(x)
if self.system:
if not self.name in hidden_users:
cmd = [ 'defaults', 'write', plist_file,
'HiddenUsersList', '-array-add', self.name ]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot user "%s" to hidden user list.'
% self.name, err=err, out=out, rc=rc)
return 0
else:
if self.name in hidden_users:
                hidden_users.remove(self.name)
cmd = [ 'defaults', 'write', plist_file,
'HiddenUsersList', '-array' ] + hidden_users
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot remove user "%s" from hidden user list.'
% self.name, err=err, out=out, rc=rc)
return 0
def user_exists(self):
        '''Check if SELF.NAME is a known user on the system.'''
cmd = self._get_dscl()
cmd += [ '-list', '/Users/%s' % self.name]
(rc, out, err) = self.execute_command(cmd)
return rc == 0
def remove_user(self):
'''Delete SELF.NAME. If SELF.FORCE is true, remove its home directory.'''
info = self.user_info()
cmd = self._get_dscl()
cmd += [ '-delete', '/Users/%s' % self.name]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot delete user "%s".'
% self.name, err=err, out=out, rc=rc)
if self.force:
if os.path.exists(info[5]):
shutil.rmtree(info[5])
out += "Removed %s" % info[5]
return (rc, out, err)
def create_user(self, command_name='dscl'):
cmd = self._get_dscl()
cmd += [ '-create', '/Users/%s' % self.name]
        (rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot create user "%s".'
% self.name, err=err, out=out, rc=rc)
self._make_group_numerical()
if self.uid is None:
self.uid = str(self._get_next_uid())
# Homedir is not created by default
if self.createhome:
if self.home is None:
self.home = '/Users/%s' % self.name
if not os.path.exists(self.home):
os.makedirs(self.home)
self.chown_homedir(int(self.uid), int(self.group), self.home)
for field in self.fields:
if self.__dict__.has_key(field[0]) and self.__dict__[field[0]]:
cmd = self._get_dscl()
cmd += [ '-create', '/Users/%s' % self.name,
field[1], self.__dict__[field[0]]]
                (rc, _out, _err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot add property "%s" to user "%s".'
% (field[0], self.name), err=err, out=out, rc=rc)
out += _out
err += _err
if rc != 0:
                    return (rc, _out, _err)
        (rc, _out, _err) = self._change_user_password()
out += _out
err += _err
self._update_system_user()
# here we don't care about change status since it is a creation,
# thus changed is always true.
if self.groups:
(rc, _out, _err, changed) = self._modify_group()
out += _out
err += _err
        return (rc, out, err)
def modify_user(self):
changed = None
out = ''
err = ''
if self.group:
self._make_group_numerical()
for field in self.fields:
if self.__dict__.has_key(field[0]) and self.__dict__[field[0]]:
current = self._get_user_property(field[1])
if current is None or current != self.__dict__[field[0]]:
cmd = self._get_dscl()
cmd += [ '-create', '/Users/%s' % self.name,
field[1], self.__dict__[field[0]]]
                    (rc, _out, _err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot update property "%s" for user "%s".'
% (field[0], self.name), err=err, out=out, rc=rc)
changed = rc
out += _out
err += _err
if self.update_password == 'always' and self.password is not None:
            (rc, _out, _err) = self._change_user_password()
out += _out
err += _err
changed = rc
if self.groups:
(rc, _out, _err, _changed) = self._modify_group()
out += _out
err += _err
if _changed is True:
changed = rc
rc = self._update_system_user()
if rc == 0:
changed = rc
return (changed, out, err)
# ===========================================
class AIX(User):
"""
    This is an AIX User manipulation class.
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'AIX'
distribution = None
SHADOWFILE = '/etc/security/passwd'
def remove_user(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def create_user_useradd(self, command_name='useradd'):
cmd = [self.module.get_bin_path(command_name, True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.createhome:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
# set password with chpasswd
if self.password is not None:
cmd = []
cmd.append(self.module.get_bin_path('chpasswd', True))
cmd.append('-e')
cmd.append('-c')
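            # stdin carries 'user:hash'; '-e' marks the supplied password as
            # already encrypted.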
self.execute_command(' '.join(cmd), data="%s:%s" % (self.name, self.password))
return (rc, out, err)
def modify_user_usermod(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
# skip if no changes to be made
if len(cmd) == 1:
(rc, out, err) = (None, '', '')
elif self.module.check_mode:
            return (0, '', '')
else:
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
# set password with chpasswd
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd = []
cmd.append(self.module.get_bin_path('chpasswd', True))
cmd.append('-e')
cmd.append('-c')
(rc2, out2, err2) = self.execute_command(' '.join(cmd), data="%s:%s" % (self.name, self.password))
else:
(rc2, out2, err2) = (None, '', '')
        if rc is not None:
return (rc, out+out2, err+err2)
else:
return (rc2, out+out2, err+err2)
# ===========================================
class HPUX(User):
"""
    This is an HP-UX User manipulation class.
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'HP-UX'
distribution = None
SHADOWFILE = '/etc/shadow'
def create_user(self):
cmd = ['/usr/sam/lbin/useradd.sam']
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.password is not None:
cmd.append('-p')
cmd.append(self.password)
if self.createhome:
cmd.append('-m')
else:
cmd.append('-M')
if self.system:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def remove_user(self):
cmd = ['/usr/sam/lbin/userdel.sam']
if self.force:
cmd.append('-F')
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def modify_user(self):
cmd = ['/usr/sam/lbin/usermod.sam']
info = self.user_info()
has_append = self._check_usermod_append()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set(remove_existing=False)
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
if has_append:
cmd.append('-a')
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
if self.append and not has_append:
cmd.append('-A')
cmd.append(','.join(group_diff))
else:
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
cmd.append('-d')
cmd.append(self.home)
if self.move_home:
cmd.append('-m')
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd.append('-p')
cmd.append(self.password)
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
elif self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
def main():
ssh_defaults = {
'bits': '2048',
'type': 'rsa',
'passphrase': None,
'comment': 'ansible-generated on %s' % socket.gethostname()
}
module = AnsibleModule(
argument_spec = dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
name=dict(required=True, aliases=['user'], type='str'),
uid=dict(default=None, type='str'),
non_unique=dict(default='no', type='bool'),
group=dict(default=None, type='str'),
groups=dict(default=None, type='str'),
comment=dict(default=None, type='str'),
home=dict(default=None, type='str'),
shell=dict(default=None, type='str'),
password=dict(default=None, type='str', no_log=True),
login_class=dict(default=None, type='str'),
# following options are specific to selinux
seuser=dict(default=None, type='str'),
# following options are specific to userdel
force=dict(default='no', type='bool'),
remove=dict(default='no', type='bool'),
# following options are specific to useradd
createhome=dict(default='yes', type='bool'),
skeleton=dict(default=None, type='str'),
system=dict(default='no', type='bool'),
# following options are specific to usermod
move_home=dict(default='no', type='bool'),
append=dict(default='no', type='bool'),
# following are specific to ssh key generation
generate_ssh_key=dict(type='bool'),
ssh_key_bits=dict(default=ssh_defaults['bits'], type='str'),
ssh_key_type=dict(default=ssh_defaults['type'], type='str'),
ssh_key_file=dict(default=None, type='str'),
ssh_key_comment=dict(default=ssh_defaults['comment'], type='str'),
ssh_key_passphrase=dict(default=None, type='str', no_log=True),
update_password=dict(default='always',choices=['always','on_create'],type='str'),
expires=dict(default=None, type='float'),
),
supports_check_mode=True
)
user = User(module)
module.debug('User instantiated - platform %s' % user.platform)
if user.distribution:
module.debug('User instantiated - distribution %s' % user.distribution)
rc = None
out = ''
err = ''
result = {}
result['name'] = user.name
result['state'] = user.state
if user.state == 'absent':
if user.user_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = user.remove_user()
if rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
result['force'] = user.force
result['remove'] = user.remove
elif user.state == 'present':
if not user.user_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = user.create_user()
result['system'] = user.system
result['createhome'] = user.createhome
else:
# modify user (note: this function is check mode aware)
(rc, out, err) = user.modify_user()
result['append'] = user.append
result['move_home'] = user.move_home
if rc is not None and rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
if user.password is not None:
result['password'] = 'NOT_LOGGING_PASSWORD'
if rc is None:
result['changed'] = False
else:
result['changed'] = True
if out:
result['stdout'] = out
if err:
result['stderr'] = err
if user.user_exists():
info = user.user_info()
        if info is False:
result['msg'] = "failed to look up user name: %s" % user.name
result['failed'] = True
result['uid'] = info[2]
result['group'] = info[3]
result['comment'] = info[4]
result['home'] = info[5]
result['shell'] = info[6]
if user.groups is not None:
result['groups'] = user.groups
# handle missing homedirs
info = user.user_info()
if user.home is None:
user.home = info[5]
if not os.path.exists(user.home) and user.createhome:
if not module.check_mode:
user.create_homedir(user.home)
user.chown_homedir(info[2], info[3], user.home)
result['changed'] = True
# deal with ssh key
if user.sshkeygen:
# generate ssh key (note: this function is check mode aware)
(rc, out, err) = user.ssh_key_gen()
if rc is not None and rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
if rc == 0:
result['changed'] = True
(rc, out, err) = user.ssh_key_fingerprint()
if rc == 0:
result['ssh_fingerprint'] = out.strip()
else:
result['ssh_fingerprint'] = err.strip()
result['ssh_key_file'] = user.get_ssh_key_path()
result['ssh_public_key'] = user.get_ssh_public_key()
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 | -8,783,438,978,017,102,000 | -1,541,902,282,450,624,000 | 32.477742 | 137 | 0.511241 | false |
getredash/redash | redash/cli/queries.py | 3 | 1226 | from click import argument
from flask.cli import AppGroup
from sqlalchemy.orm.exc import NoResultFound
manager = AppGroup(help="Queries management commands.")
@manager.command()
@argument("query_id")
@argument("tag")
def add_tag(query_id, tag):
from redash import models
query_id = int(query_id)
try:
q = models.Query.get_by_id(query_id)
except NoResultFound:
print("Query not found.")
exit(1)
tags = q.tags
if tags is None:
tags = []
tags.append(tag)
q.tags = list(set(tags))
models.db.session.add(q)
models.db.session.commit()
print("Tag added.")
@manager.command()
@argument("query_id")
@argument("tag")
def remove_tag(query_id, tag):
from redash import models
query_id = int(query_id)
try:
q = models.Query.get_by_id(query_id)
except NoResultFound:
print("Query not found.")
exit(1)
tags = q.tags
if tags is None:
print("Tag is empty.")
exit(1)
try:
tags.remove(tag)
except ValueError:
print("Tag not found.")
exit(1)
q.tags = list(set(tags))
models.db.session.add(q)
models.db.session.commit()
print("Tag removed.")
| bsd-2-clause | -228,783,629,951,366,940 | 4,058,801,484,929,820,000 | 18.15625 | 55 | 0.607667 | false |
mtwestra/akvo-wandelenvoorwater | wvw/W4W/models.py | 1 | 6568 | from django.db import models
class school(models.Model):
class Meta:
verbose_name = "School"
verbose_name_plural="Scholen"
BRIN_NUMMER = models.CharField('BRIN code',max_length=15,blank=True, null=True)
NAAM_VOLLEDIG = models.CharField('Naam school',max_length=100,blank=True, null=True)
NAAM_STRAAT_VEST = models.CharField('Straat',max_length=100,blank=True, null=True)
NR_HUIS_VEST = models.CharField('Huisnummer',max_length=15,blank=True, null=True)
POSTCODE_VEST = models.CharField('Postcode',max_length=10,blank=True, null=True)
NAAM_PLAATS_VEST = models.CharField('Plaats',max_length=100,blank=True, null=True)
GEMEENTENAAM = models.CharField('Gemeente',max_length=100,blank=True, null=True)
GEOCODED=models.CharField('Geocoded',max_length=25,blank=True, null=True)
LONGITUDE=models.CharField('Longitude',max_length=20,blank=True, null=True)
LATITUDE=models.CharField('Latitude',max_length=20,blank=True, null=True)
NAAM_STRAAT_CORR = models.CharField('Straat',max_length=100,blank=True, null=True)
NR_HUIS_CORR = models.CharField('Huisnummer',max_length=15,blank=True, null=True)
POSTCODE_CORR = models.CharField('Postcode',max_length=10,blank=True, null=True)
NAAM_PLAATS_CORR = models.CharField('Plaats',max_length=100,blank=True, null=True)
NR_TELEFOON =models.CharField('Telefoon',max_length=15,blank=True, null=True)
NR_FAX = models.CharField('Fax',max_length=15,blank=True, null=True)
PROVINCIE_VEST = models.CharField('Provincie',max_length=100,blank=True, null=True)
NAAM_VOLLEDIG_GEZ = models.CharField('Naam',max_length=100,blank=True, null=True)
NR_ADMINISTRATIE_GEZ = models.CharField('Administratienummer',max_length=100,blank=True, null=True)
NAAM_STRAAT_COR_GEZ =models.CharField('Straat',max_length=100,blank=True, null=True)
NR_HUIS_CORR_GEZ =models.CharField('Huisnummer',max_length=15,blank=True, null=True)
POSTCODE_CORR_GEZ = models.CharField('Postcode',max_length=100,blank=True, null=True)
NAAM_PLAATS_CORR_GEZ =models.CharField('Plaats',max_length=100,blank=True, null=True)
INTERNET =models.CharField('Website',max_length=100,blank=True, null=True)
def __unicode__(self):
return self.NAAM_VOLLEDIG
class project(models.Model):
class Meta:
verbose_name = "Project"
verbose_name_plural="Projecten"
ACTIEF=models.BooleanField('Actief')
AKVO_CODE=models.IntegerField('Code Akvo project',blank=True, null=True)
PROJECT_AANDUIDING=models.CharField('Project aanduiding',max_length=100,blank=True, null=True)
PROJECT_NAAM = models.CharField('Naam contactpersoon',max_length=100,blank=True, null=True)
PROJECT_BESCHRIJVING = models.CharField('Opmerkingen',max_length=250,blank=True, null=True)
INVOER_DATUM = models.DateField('Invoerdatum',blank=True, null=True)
LAATSTE_WIJZIGING = models.DateTimeField('Laatste wijziging',blank=True, null=True)
def __unicode__(self):
return u'%s' % (self.PROJECT_NAAM)
class steunpunt(models.Model):
class Meta:
verbose_name = "Steunpunt"
verbose_name_plural="Steunpunten"
ACTIEF=models.BooleanField('Actief')
NAAM = models.CharField('Naam steunpunt',max_length=100,blank=True, null=True)
LOGO_URL = models.CharField('Logo URL',max_length=200,blank=True, null=True)
WEBSITE = models.CharField('Website',max_length=100,blank=True, null=True)
USERNAME = models.CharField('Username',max_length=100,blank=True, null=True)
PASSWD = models.CharField('Password',max_length=100,blank=True, null=True)
    PROJECTEN = models.ManyToManyField(project, blank=True)  # null=True has no effect on ManyToManyField
NAAM_CONTACT = models.CharField('Naam contactpersoon',max_length=100,blank=True, null=True)
EMAIL_CONTACT = models.CharField('E-mail',max_length=100,blank=True, null=True)
ADRES = models.CharField('Adres',max_length=100,blank=True, null=True)
POSTCODE = models.CharField('Postcode',max_length=10,blank=True, null=True)
PLAATS = models.CharField('Plaats',max_length=100,blank=True, null=True)
NR_TELEFOON = models.CharField('Telefoon',max_length=15,blank=True, null=True)
INVOER_DATUM = models.DateTimeField('Invoerdatum',blank=True, null=True)
    LAATSTE_WIJZIGING = models.DateTimeField('Laatste wijziging',blank=True, null=True)
def __unicode__(self):
return u'%s' % (self.NAAM)
class inschrijving(models.Model):
class Meta:
verbose_name = "Inschrijving"
verbose_name_plural="Inschrijvingen"
STEUNPUNT=models.ForeignKey(steunpunt, verbose_name="Steunpunt")
PROJECT=models.ForeignKey(project, verbose_name="Project")
NUM_GROEP_GR7=models.IntegerField('Aantal groepen 7', blank=True, null=True)
NUM_GROEP_GR8=models.IntegerField('Aantal groepen 8', blank=True, null=True)
NUM_GROEP_GR67=models.IntegerField('Aantal gemengde groepen 6/7', blank=True, null=True)
NUM_GROEP_GR678=models.IntegerField('Aantal gemengde groepen 6/7/8',blank=True, null=True)
NUM_GROEP_GR78=models.IntegerField('Aantal gemengde groepen 7/8', blank=True, null=True)
ACTIEF=models.BooleanField('Actief')
TOTAAL_LEERLINGEN=models.IntegerField('Totaal aantal leerlingen', blank=True, null=True)
DATUM_WANDELING=models.DateField('Datum wandeling',blank=True, null=True)
PLAATS_WANDELING=models.CharField('Plaats wandeling',max_length=100,blank=True,null=True)
EERDER_MEEGEDAAN=models.CharField('Eerder meegedaan',max_length=100,blank=True,null=True)
NAAM_SCHOOL = models.CharField('Naam school',max_length=200,blank=True,null=True)
BRIN_NUMMER = models.CharField('BRIN code',max_length=15,blank=True,null=True)
NAAM_CONTACT = models.CharField('Naam contactpersoon',max_length=100,blank=True,null=True)
EMAIL_CONTACT = models.CharField('E-mail',max_length=100,blank=True,null=True)
ADRES = models.CharField('Adres',max_length=100,blank=True,null=True)
POSTCODE = models.CharField('Postcode',max_length=10,blank=True,null=True)
PLAATS = models.CharField('Plaats',max_length=100,blank=True,null=True)
NR_TELEFOON = models.CharField('Telefoon',max_length=15,blank=True,null=True)
AKKOORD_VOORW = models.BooleanField('Akkoord met de voorwaarden?')
OPMERKINGEN = models.CharField('Opmerkingen',max_length=1000,blank=True,null=True)
INVOER_DATUM = models.DateTimeField('Invoerdatum',blank=True, null=True)
LAATSTE_WIJZIGING=models.DateTimeField('Laatste wijziging',blank=True, null=True)
GEOCODED = models.CharField('Geocode resultaat',max_length=25,blank=True,null=True,default='NONE')
LONGITUDE = models.CharField('Longitude',max_length=20,blank=True,null=True)
LATITUDE = models.CharField('Latitude',max_length=20,blank=True,null=True)
def __unicode__(self):
return u'%s' %(self.NAAM_SCHOOL)
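

# Hedged usage sketch (not part of the original module): querying the models
# above from a Django shell. The filter values are invented for illustration:
#
#   from wvw.W4W.models import school, inschrijving
#
#   scholen = school.objects.filter(GEMEENTENAAM='Amsterdam')
#   actief = inschrijving.objects.filter(ACTIEF=True).order_by('-INVOER_DATUM')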
| agpl-3.0 | -336,961,577,028,411,600 | 3,599,073,393,074,904,000 | 51.544 | 101 | 0.759592 | false |
google/compare_gan | compare_gan/architectures/resnet_init_test.py | 2 | 4302 | # coding=utf-8
# Copyright 2018 Google LLC & Hwalsuk Lee.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests weight initialization ops using ResNet5 architecture."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compare_gan.architectures import resnet5
from compare_gan.gans import consts
import gin
import tensorflow as tf
class ResNetInitTest(tf.test.TestCase):
def setUp(self):
super(ResNetInitTest, self).setUp()
gin.clear_config()
def testInitializersOldDefault(self):
    valid_initializer = [
"kernel/Initializer/random_normal",
"bias/Initializer/Const",
# truncated_normal is the old default for conv2d.
"kernel/Initializer/truncated_normal",
"bias/Initializer/Const",
"beta/Initializer/zeros",
"gamma/Initializer/ones",
]
valid_op_names = "/({}):0$".format("|".join(valid_initalizer))
with tf.Graph().as_default():
z = tf.zeros((2, 128))
fake_image = resnet5.Generator(image_shape=(128, 128, 3))(
z, y=None, is_training=True)
resnet5.Discriminator()(fake_image, y=None, is_training=True)
for var in tf.trainable_variables():
op_name = var.initializer.inputs[1].name
self.assertRegex(op_name, valid_op_names)
def testInitializersRandomNormal(self):
gin.bind_parameter("weights.initializer", consts.NORMAL_INIT)
    valid_initializer = [
"kernel/Initializer/random_normal",
"bias/Initializer/Const",
"kernel/Initializer/random_normal",
"bias/Initializer/Const",
"beta/Initializer/zeros",
"gamma/Initializer/ones",
]
valid_op_names = "/({}):0$".format("|".join(valid_initalizer))
with tf.Graph().as_default():
z = tf.zeros((2, 128))
fake_image = resnet5.Generator(image_shape=(128, 128, 3))(
z, y=None, is_training=True)
resnet5.Discriminator()(fake_image, y=None, is_training=True)
for var in tf.trainable_variables():
op_name = var.initializer.inputs[1].name
self.assertRegex(op_name, valid_op_names)
def testInitializersTruncatedNormal(self):
gin.bind_parameter("weights.initializer", consts.TRUNCATED_INIT)
    valid_initializer = [
"kernel/Initializer/truncated_normal",
"bias/Initializer/Const",
"kernel/Initializer/truncated_normal",
"bias/Initializer/Const",
"beta/Initializer/zeros",
"gamma/Initializer/ones",
]
valid_op_names = "/({}):0$".format("|".join(valid_initalizer))
with tf.Graph().as_default():
z = tf.zeros((2, 128))
fake_image = resnet5.Generator(image_shape=(128, 128, 3))(
z, y=None, is_training=True)
resnet5.Discriminator()(fake_image, y=None, is_training=True)
for var in tf.trainable_variables():
op_name = var.initializer.inputs[1].name
self.assertRegex(op_name, valid_op_names)
def testGeneratorInitializersOrthogonal(self):
gin.bind_parameter("weights.initializer", consts.ORTHOGONAL_INIT)
    valid_initializer = [
"kernel/Initializer/mul_1",
"bias/Initializer/Const",
"kernel/Initializer/mul_1",
"bias/Initializer/Const",
"beta/Initializer/zeros",
"gamma/Initializer/ones",
]
valid_op_names = "/({}):0$".format("|".join(valid_initalizer))
with tf.Graph().as_default():
z = tf.zeros((2, 128))
fake_image = resnet5.Generator(image_shape=(128, 128, 3))(
z, y=None, is_training=True)
resnet5.Discriminator()(fake_image, y=None, is_training=True)
for var in tf.trainable_variables():
op_name = var.initializer.inputs[1].name
self.assertRegex(op_name, valid_op_names)
if __name__ == "__main__":
tf.test.main()
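

# Hedged aside (not part of the original test file): the feature under test
# is gin-configurable weight initialization. A minimal sketch of the binding
# the cases above exercise (names are taken from this file):
#
#   gin.clear_config()
#   gin.bind_parameter("weights.initializer", consts.ORTHOGONAL_INIT)
#   # Building resnet5.Generator / resnet5.Discriminator afterwards yields
#   # kernels whose initializer ops match "kernel/Initializer/mul_1".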
| apache-2.0 | 4,442,105,475,770,244,600 | 4,553,204,545,424,317,000 | 36.086207 | 74 | 0.659228 | false |
Noviat/account-financial-reporting-V3-intrastat | account_chart_report/__init__.py | 34 | 1038 | # -*- coding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 Savoir-faire Linux (<www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from . import report
from . import wizard
| agpl-3.0 | 9,141,712,847,523,259,000 | 8,041,240,226,013,703,000 | 42.25 | 79 | 0.609827 | false |
kaday/cylc | lib/cylc/run.py | 1 | 3790 | #!/usr/bin/env python
# THIS FILE IS PART OF THE CYLC SUITE ENGINE.
# Copyright (C) 2008-2015 NIWA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Provide the main function for "cylc run" and "cylc restart"."""
import re
import sys
from daemonize import daemonize
from version import CYLC_VERSION
from cylc.cfgspec.globalcfg import GLOBAL_CFG
import flags
from exceptions import SchedulerStop, SchedulerError
def print_blurb():
logo = (
" ,_, \n"
" | | \n"
",_____,_, ,_| |_____, \n"
"| ,___| | | | | ,___| \n"
"| |___| |_| | | |___, \n"
"\_____\___, |_\_____| \n"
" ,___| | \n"
" \_____| \n"
)
license = """
The Cylc Suite Engine [%s]
Copyright (C) 2008-2015 NIWA
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
This program comes with ABSOLUTELY NO WARRANTY;
see `cylc warranty`. It is free software, you
are welcome to redistribute it under certain
conditions; see `cylc conditions`.
""" % CYLC_VERSION
logo_lines = logo.splitlines()
license_lines = license.splitlines()
lmax = max(len(line) for line in license_lines)
for i in range(len(logo_lines)):
print logo_lines[i], ('{0: ^%s}' % lmax).format(license_lines[i])
print
def main(name, start):
# Parse the command line:
server = start()
# Print copyright and license information
print_blurb()
# Create run directory tree and get port.
try:
GLOBAL_CFG.create_cylc_run_tree(server.suite)
server.configure_pyro()
except Exception as exc:
if flags.debug:
raise
else:
sys.exit(exc)
# Daemonize the suite
if not server.options.no_detach and not flags.debug:
daemonize(server)
try:
server.configure()
server.run()
# For profiling (see Python docs for how to display the stats).
# import cProfile
# cProfile.runctx('server.run()', globals(), locals(), 'stats')
except SchedulerStop, x:
# deliberate stop
print str(x)
server.shutdown()
except SchedulerError, x:
print >> sys.stderr, str(x)
server.shutdown()
sys.exit(1)
except KeyboardInterrupt as x:
import traceback
try:
server.shutdown(str(x))
except Exception as y:
# In case of exceptions in the shutdown method itself.
traceback.print_exc(y)
sys.exit(1)
except (KeyboardInterrupt, Exception) as x:
import traceback
traceback.print_exc(x)
print >> sys.stderr, "ERROR CAUGHT: cleaning up before exit"
try:
server.shutdown('ERROR: ' + str(x))
except Exception, y:
# In case of exceptions in the shutdown method itself
traceback.print_exc(y)
if flags.debug:
raise
else:
print >> sys.stderr, "THE ERROR WAS:"
print >> sys.stderr, x
print >> sys.stderr, "use --debug to turn on exception tracebacks)"
sys.exit(1)
else:
# main loop ends (not used?)
server.shutdown()
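

# Hedged usage sketch (not part of the original module): the "cylc run" and
# "cylc restart" entry points are expected to call main() with a factory that
# parses the command line and returns the scheduler, roughly:
#
#   main('run', start)   # where start() returns the configured server object
#
# The exact factory lives in the calling scripts; the form shown here is an
# assumption based on the signature of main() above.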
| gpl-3.0 | -8,104,658,552,001,468,000 | -467,739,471,585,562,500 | 29.32 | 79 | 0.587335 | false |
dchud/sentinel | canary/cmdline.py | 1 | 2175 | # $Id$
import optparse
import os
class CommandLine:
"""A helper class for canary command line tools. When you use
CommandLine you will get a --config option for free, and a handy
method for instantiating a Context() object.
cmdline = CommandLine()
cmdline.parse_args()
con = cmdline.context()
"""
def __init__(self):
self.parser = optparse.OptionParser('usage: %prog [options]')
self.parser.add_option('-c', '--config',
dest='config', default='conf/canary_config.py',
help='path to configuration file')
self._ran = False
    def __getattr__(self, name):
        """To support attribute lookups on parsed options."""
        # Checking hasattr(self, name) here would recurse back into
        # __getattr__, and `self.name` returned the attribute literally
        # called "name"; delegate straight to the parsed options instead.
        if hasattr(self.options, name):
            return getattr(self.options, name)
        raise AttributeError(name)
def add_option(self,*args,**kwargs):
"""Similar to OptionParser.add_option
cmdline.add_option('-f', '--foo', help='foo bar')
"""
self.parser.add_option(*args,**kwargs)
def parse_args(self):
"""Similar to OptionParser.parse_args
options,args = cmdline.parse_args()
"""
if not self._ran:
self.options, self.args = self.parser.parse_args()
self._ran = True
return self.options, self.args
def context(self):
"""After you've called parse_args() you should be able
to fetch a canary.context.Context object.
context = cmdline.context()
"""
if not self.options.config:
self.guess_canary_config()
from canary.context import CanaryConfig, Context
config = CanaryConfig()
config.read_file(self.options.config)
return Context(config)
def guess_canary_config(self):
def find(arg,dname,fnames):
if 'canary_config.py' in fnames:
fnames = []
self.options.config = dname + '/canary_config.py'
os.path.walk(os.environ['HOME'],find,None)
print "using canary config at %s" % self.options.config
return self.options.config
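

# Hedged end-to-end sketch of the pattern described in the class docstring;
# the extra --foo option is illustrative only:
#
#   cmdline = CommandLine()
#   cmdline.add_option('-f', '--foo', help='foo bar')
#   options, args = cmdline.parse_args()
#   con = cmdline.context()  # loads conf/canary_config.py unless --config given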
| mit | 4,507,006,211,587,873,000 | -4,811,521,267,877,626,000 | 29.208333 | 69 | 0.586207 | false |
pierreg/tensorflow | tensorflow/python/training/training.py | 3 | 11365 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
"""This library provides a set of classes and functions that helps train models.
## Optimizers
The Optimizer base class provides methods to compute gradients for a loss and
apply gradients to variables. A collection of subclasses implement classic
optimization algorithms such as GradientDescent and Adagrad.
You never instantiate the Optimizer class itself, but instead instantiate one
of the subclasses.
@@Optimizer
@@GradientDescentOptimizer
@@AdadeltaOptimizer
@@AdagradOptimizer
@@AdagradDAOptimizer
@@MomentumOptimizer
@@AdamOptimizer
@@FtrlOptimizer
@@ProximalGradientDescentOptimizer
@@ProximalAdagradOptimizer
@@RMSPropOptimizer
## Gradient Computation
TensorFlow provides functions to compute the derivatives for a given
TensorFlow computation graph, adding operations to the graph. The
optimizer classes automatically compute derivatives on your graph, but
creators of new Optimizers or expert users can call the lower-level
functions below.
@@gradients
@@AggregationMethod
@@stop_gradient
## Gradient Clipping
TensorFlow provides several operations that you can use to add clipping
functions to your graph. You can use these functions to perform general data
clipping, but they're particularly useful for handling exploding or vanishing
gradients.
@@clip_by_value
@@clip_by_norm
@@clip_by_average_norm
@@clip_by_global_norm
@@global_norm
## Decaying the learning rate
@@exponential_decay
## Moving Averages
Some training algorithms, such as GradientDescent and Momentum often benefit
from maintaining a moving average of variables during optimization. Using the
moving averages for evaluations often improve results significantly.
@@ExponentialMovingAverage
## Coordinator and QueueRunner
See [Threading and Queues](../../how_tos/threading_and_queues/index.md)
for how to use threads and queues. For documentation on the Queue API,
see [Queues](../../api_docs/python/io_ops.md#queues).
@@Coordinator
@@QueueRunner
@@add_queue_runner
@@start_queue_runners
## Distributed execution
See [Distributed TensorFlow](../../how_tos/distributed/index.md) for
more information about how to configure a distributed TensorFlow program.
@@Server
@@Supervisor
@@SessionManager
@@ClusterSpec
@@replica_device_setter
@@Scaffold
@@MonitoredTrainingSession
@@SessionCreator
@@ChiefSessionCreator
@@WorkerSessionCreator
@@MonitoredSession
## Summary Operations
The following ops output
[`Summary`](https://www.tensorflow.org/code/tensorflow/core/framework/summary.proto)
protocol buffers as serialized string tensors.
You can fetch the output of a summary op in a session, and pass it to
a [SummaryWriter](../../api_docs/python/train.md#SummaryWriter) to append it
to an event file. Event files contain
[`Event`](https://www.tensorflow.org/code/tensorflow/core/util/event.proto)
protos that can contain `Summary` protos along with the timestamp and
step. You can then use TensorBoard to visualize the contents of the
event files. See [TensorBoard and
Summaries](../../how_tos/summaries_and_tensorboard/index.md) for more
details.
@@scalar_summary
@@image_summary
@@audio_summary
@@histogram_summary
@@zero_fraction
@@merge_summary
@@merge_all_summaries
## Adding Summaries to Event Files
See [Summaries and
TensorBoard](../../how_tos/summaries_and_tensorboard/index.md) for an
overview of summaries, event files, and visualization in TensorBoard.
@@SummaryWriter
@@SummaryWriterCache
@@summary_iterator
## Training utilities
@@global_step
@@basic_train_loop
@@get_global_step
@@assert_global_step
@@write_graph
@@SessionRunHook
@@LoggingTensorHook
@@StopAtStepHook
@@CheckpointSaverHook
@@NewCheckpointReader
@@StepCounterHook
@@NanLossDuringTrainingError
@@NanTensorHook
@@SummarySaverHook
@@SessionRunArgs
@@SessionRunContext
@@SessionRunValues
@@LooperThread
"""
# pylint: enable=line-too-long
# Optimizers.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys as _sys
from tensorflow.python.ops import io_ops as _io_ops
from tensorflow.python.ops import state_ops as _state_ops
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=g-bad-import-order,unused-import
from tensorflow.python.training.adadelta import AdadeltaOptimizer
from tensorflow.python.training.adagrad import AdagradOptimizer
from tensorflow.python.training.adagrad_da import AdagradDAOptimizer
from tensorflow.python.training.proximal_adagrad import ProximalAdagradOptimizer
from tensorflow.python.training.adam import AdamOptimizer
from tensorflow.python.training.ftrl import FtrlOptimizer
from tensorflow.python.training.momentum import MomentumOptimizer
from tensorflow.python.training.moving_averages import ExponentialMovingAverage
from tensorflow.python.training.optimizer import Optimizer
from tensorflow.python.training.rmsprop import RMSPropOptimizer
from tensorflow.python.training.gradient_descent import GradientDescentOptimizer
from tensorflow.python.training.proximal_gradient_descent import ProximalGradientDescentOptimizer
from tensorflow.python.training.sync_replicas_optimizer import SyncReplicasOptimizer
from tensorflow.python.training.sync_replicas_optimizer import SyncReplicasOptimizerV2
# Utility classes for training.
from tensorflow.python.training.coordinator import Coordinator
from tensorflow.python.training.coordinator import LooperThread
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.training.queue_runner import *
# For the module level doc.
from tensorflow.python.training import input as _input
from tensorflow.python.training.input import *
from tensorflow.python.training.basic_session_run_hooks import LoggingTensorHook
from tensorflow.python.training.basic_session_run_hooks import StopAtStepHook
from tensorflow.python.training.basic_session_run_hooks import CheckpointSaverHook
from tensorflow.python.training.basic_session_run_hooks import StepCounterHook
from tensorflow.python.training.basic_session_run_hooks import NanLossDuringTrainingError
from tensorflow.python.training.basic_session_run_hooks import NanTensorHook
from tensorflow.python.training.basic_session_run_hooks import SummarySaverHook
from tensorflow.python.training.basic_loops import basic_train_loop
from tensorflow.python.training.device_setter import replica_device_setter
from tensorflow.python.training.monitored_session import Scaffold
from tensorflow.python.training.monitored_session import MonitoredTrainingSession
from tensorflow.python.training.monitored_session import SessionCreator
from tensorflow.python.training.monitored_session import ChiefSessionCreator
from tensorflow.python.training.monitored_session import WorkerSessionCreator
from tensorflow.python.training.monitored_session import MonitoredSession
from tensorflow.python.training.saver import Saver
from tensorflow.python.training.saver import checkpoint_exists
from tensorflow.python.training.saver import generate_checkpoint_state_proto
from tensorflow.python.training.saver import get_checkpoint_mtimes
from tensorflow.python.training.saver import get_checkpoint_state
from tensorflow.python.training.saver import latest_checkpoint
from tensorflow.python.training.saver import update_checkpoint_state
from tensorflow.python.training.saver import export_meta_graph
from tensorflow.python.training.saver import import_meta_graph
from tensorflow.python.training.session_run_hook import SessionRunHook
from tensorflow.python.training.session_run_hook import SessionRunArgs
from tensorflow.python.training.session_run_hook import SessionRunContext
from tensorflow.python.training.session_run_hook import SessionRunValues
from tensorflow.python.training.session_manager import SessionManager
from tensorflow.python.training.summary_io import summary_iterator
from tensorflow.python.training.summary_io import SummaryWriter
from tensorflow.python.training.summary_io import SummaryWriterCache
from tensorflow.python.training.supervisor import Supervisor
from tensorflow.python.training.training_util import write_graph
from tensorflow.python.training.training_util import global_step
from tensorflow.python.training.training_util import get_global_step
from tensorflow.python.training.training_util import assert_global_step
from tensorflow.python.pywrap_tensorflow import do_quantize_training_on_graphdef
from tensorflow.python.pywrap_tensorflow import NewCheckpointReader
# Training data protos.
from tensorflow.core.example.example_pb2 import *
from tensorflow.core.example.feature_pb2 import *
from tensorflow.core.protobuf.saver_pb2 import *
# Utility op. Open Source. TODO(touts): move to nn?
from tensorflow.python.training.learning_rate_decay import *
# Distributed computing support.
from tensorflow.core.protobuf.tensorflow_server_pb2 import ClusterDef
from tensorflow.core.protobuf.tensorflow_server_pb2 import JobDef
from tensorflow.core.protobuf.tensorflow_server_pb2 import ServerDef
from tensorflow.python.training.server_lib import ClusterSpec
from tensorflow.python.training.server_lib import Server
# Symbols whitelisted for export without documentation.
_allowed_symbols = [
# TODO(cwhipkey): review these and move to contrib or expose through
# documentation.
"generate_checkpoint_state_proto", # Used internally by saver.
"checkpoint_exists", # Only used in test?
"get_checkpoint_mtimes", # Only used in test?
# Legacy: remove.
"do_quantize_training_on_graphdef", # At least use grah_def, not graphdef.
# No uses within tensorflow.
"queue_runner", # Use tf.train.start_queue_runner etc directly.
# This is also imported internally.
# TODO(drpng): document these. The reference in howtos/distributed does
# not link.
"SyncReplicasOptimizer",
"SyncReplicasOptimizerV2",
# Protobufs:
"BytesList", # from example_pb2.
"ClusterDef",
"Example", # from example_pb2
"Feature", # from example_pb2
"Features", # from example_pb2
"FeatureList", # from example_pb2
"FeatureLists", # from example_pb2
"FloatList", # from example_pb2.
"Int64List", # from example_pb2.
"JobDef",
"SaverDef", # From saver_pb2.
"SequenceExample", # from example_pb2.
"ServerDef",
]
# Include extra modules for docstrings because:
# * Input methods in tf.train are documented in io_ops.
# * Saver methods in tf.train are documented in state_ops.
remove_undocumented(__name__, _allowed_symbols,
[_sys.modules[__name__], _io_ops, _state_ops])
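
# Hedged illustration (not part of the original module) of the basic
# optimizer flow documented in the module docstring; the loss tensor and the
# learning rate are made up for the example:
#
#   opt = GradientDescentOptimizer(learning_rate=0.1)
#   train_op = opt.minimize(loss)  # adds gradient computation + update ops
#   # Running train_op in a (Monitored)Session takes one optimization step.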
| apache-2.0 | -432,266,997,593,387,970 | 9,223,309,051,402,178,000 | 36.883333 | 97 | 0.792609 | false |
joachimmetz/plaso | plaso/formatters/file_system.py | 4 | 1917 | # -*- coding: utf-8 -*-
"""File system custom event formatter helpers."""
from plaso.formatters import interface
from plaso.formatters import manager
class NTFSFileReferenceFormatterHelper(interface.CustomEventFormatterHelper):
  """NTFS file reference formatter helper."""

  IDENTIFIER = 'ntfs_file_reference'

  def FormatEventValues(self, event_values):
    """Formats event values using the helper.

    Args:
      event_values (dict[str, object]): event values.
    """
    file_reference = event_values.get('file_reference', None)
    if file_reference:
      event_values['file_reference'] = '{0:d}-{1:d}'.format(
          file_reference & 0xffffffffffff, file_reference >> 48)


class NTFSParentFileReferenceFormatterHelper(
    interface.CustomEventFormatterHelper):
  """NTFS parent file reference formatter helper."""

  IDENTIFIER = 'ntfs_parent_file_reference'

  def FormatEventValues(self, event_values):
    """Formats event values using the helper.

    Args:
      event_values (dict[str, object]): event values.
    """
    parent_file_reference = event_values.get('parent_file_reference', None)
    if parent_file_reference:
      event_values['parent_file_reference'] = '{0:d}-{1:d}'.format(
          parent_file_reference & 0xffffffffffff, parent_file_reference >> 48)


class NTFSPathHintsFormatterHelper(interface.CustomEventFormatterHelper):
  """NTFS path hints formatter helper."""

  IDENTIFIER = 'ntfs_path_hints'

  def FormatEventValues(self, event_values):
    """Formats event values using the helper.

    Args:
      event_values (dict[str, object]): event values.
    """
    path_hints = event_values.get('path_hints', None)
    if path_hints:
      event_values['path_hints'] = ';'.join(path_hints)


manager.FormattersManager.RegisterEventFormatterHelpers([
    NTFSFileReferenceFormatterHelper, NTFSParentFileReferenceFormatterHelper,
    NTFSPathHintsFormatterHelper])
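

# Hedged sketch (not part of the original module) of what the first helper
# does to an event-values dict; the sample file reference is invented:
#
#   event_values = {'file_reference': (2 << 48) | 41}
#   NTFSFileReferenceFormatterHelper().FormatEventValues(event_values)
#   # event_values['file_reference'] is now '41-2' (MFT entry 41, sequence 2)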
| apache-2.0 | 3,713,287,832,344,803,000 | -797,160,026,110,183,200 | 30.42623 | 78 | 0.713093 | false |
Hellowlol/PyTunes | libs/enzyme/flv.py | 180 | 6375 | # -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
# Copyright 2003-2006 Dirk Meyer <dischi@freevo.org>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
from exceptions import ParseError
import core
import logging
import struct
__all__ = ['Parser']
# get logging object
log = logging.getLogger(__name__)
FLV_TAG_TYPE_AUDIO = 0x08
FLV_TAG_TYPE_VIDEO = 0x09
FLV_TAG_TYPE_META = 0x12
# audio flags
FLV_AUDIO_CHANNEL_MASK = 0x01
FLV_AUDIO_SAMPLERATE_MASK = 0x0c
FLV_AUDIO_CODECID_MASK = 0xf0
FLV_AUDIO_SAMPLERATE_OFFSET = 2
FLV_AUDIO_CODECID_OFFSET = 4
FLV_AUDIO_CODECID = (0x0001, 0x0002, 0x0055, 0x0001)
# video flags
FLV_VIDEO_CODECID_MASK = 0x0f
FLV_VIDEO_CODECID = ('FLV1', 'MSS1', 'VP60') # wild guess
FLV_DATA_TYPE_NUMBER = 0x00
FLV_DATA_TYPE_BOOL = 0x01
FLV_DATA_TYPE_STRING = 0x02
FLV_DATA_TYPE_OBJECT = 0x03
FLC_DATA_TYPE_CLIP = 0x04
FLV_DATA_TYPE_REFERENCE = 0x07
FLV_DATA_TYPE_ECMARRAY = 0x08
FLV_DATA_TYPE_ENDOBJECT = 0x09
FLV_DATA_TYPE_ARRAY = 0x0a
FLV_DATA_TYPE_DATE = 0x0b
FLV_DATA_TYPE_LONGSTRING = 0x0c
FLVINFO = {
'creator': 'copyright',
}
class FlashVideo(core.AVContainer):
"""
Experimental parser for Flash videos. It requires certain flags to
be set to report video resolutions and in most cases it does not
provide that information.
"""
table_mapping = { 'FLVINFO' : FLVINFO }
def __init__(self, file):
core.AVContainer.__init__(self)
self.mime = 'video/flv'
self.type = 'Flash Video'
data = file.read(13)
if len(data) < 13 or struct.unpack('>3sBBII', data)[0] != 'FLV':
raise ParseError()
for _ in range(10):
if self.audio and self.video:
break
data = file.read(11)
if len(data) < 11:
break
chunk = struct.unpack('>BH4BI', data)
size = (chunk[1] << 8) + chunk[2]
if chunk[0] == FLV_TAG_TYPE_AUDIO:
flags = ord(file.read(1))
if not self.audio:
a = core.AudioStream()
a.channels = (flags & FLV_AUDIO_CHANNEL_MASK) + 1
srate = (flags & FLV_AUDIO_SAMPLERATE_MASK)
a.samplerate = (44100 << (srate >> FLV_AUDIO_SAMPLERATE_OFFSET) >> 3)
codec = (flags & FLV_AUDIO_CODECID_MASK) >> FLV_AUDIO_CODECID_OFFSET
if codec < len(FLV_AUDIO_CODECID):
a.codec = FLV_AUDIO_CODECID[codec]
self.audio.append(a)
file.seek(size - 1, 1)
elif chunk[0] == FLV_TAG_TYPE_VIDEO:
flags = ord(file.read(1))
if not self.video:
v = core.VideoStream()
codec = (flags & FLV_VIDEO_CODECID_MASK) - 2
if codec < len(FLV_VIDEO_CODECID):
v.codec = FLV_VIDEO_CODECID[codec]
# width and height are in the meta packet, but I have
# no file with such a packet inside. So maybe we have
# to decode some parts of the video.
self.video.append(v)
file.seek(size - 1, 1)
elif chunk[0] == FLV_TAG_TYPE_META:
log.info(u'metadata %r', str(chunk))
metadata = file.read(size)
try:
while metadata:
length, value = self._parse_value(metadata)
if isinstance(value, dict):
log.info(u'metadata: %r', value)
if value.get('creator'):
self.copyright = value.get('creator')
if value.get('width'):
self.width = value.get('width')
if value.get('height'):
self.height = value.get('height')
if value.get('duration'):
self.length = value.get('duration')
self._appendtable('FLVINFO', value)
if not length:
# parse error
break
metadata = metadata[length:]
except (IndexError, struct.error, TypeError):
pass
else:
            log.info(u'unknown %r', str(chunk))
file.seek(size, 1)
file.seek(4, 1)
def _parse_value(self, data):
"""
Parse the next metadata value.
"""
if ord(data[0]) == FLV_DATA_TYPE_NUMBER:
value = struct.unpack('>d', data[1:9])[0]
return 9, value
if ord(data[0]) == FLV_DATA_TYPE_BOOL:
return 2, bool(data[1])
if ord(data[0]) == FLV_DATA_TYPE_STRING:
length = (ord(data[1]) << 8) + ord(data[2])
return length + 3, data[3:length + 3]
if ord(data[0]) == FLV_DATA_TYPE_ECMARRAY:
init_length = len(data)
num = struct.unpack('>I', data[1:5])[0]
data = data[5:]
result = {}
for _ in range(num):
length = (ord(data[0]) << 8) + ord(data[1])
key = data[2:length + 2]
data = data[length + 2:]
length, value = self._parse_value(data)
if not length:
return 0, result
result[key] = value
data = data[length:]
return init_length - len(data), result
log.info(u'unknown code: %x. Stop metadata parser', ord(data[0]))
return 0, None
Parser = FlashVideo
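

# Hedged usage sketch (not part of the original module): enzyme normally
# selects this parser by file type, but it can be driven directly with an
# open file object (the path is illustrative):
#
#   with open('clip.flv', 'rb') as f:
#       info = Parser(f)
#   print info.mime, info.type   # 'video/flv', 'Flash Video'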
| gpl-3.0 | 7,894,072,354,927,495,000 | -3,120,708,187,360,256,500 | 34.220994 | 89 | 0.529255 | false |
rexyeah/jira-cli | tests/common_bridge_cases.py | 1 | 3778 | """
"""
import os
import vcr
jiravcr = vcr.VCR(
record_mode = 'once',
match_on = ['uri', 'method'],
)
class BridgeTests:
def test_get_issue(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "issue.yaml")):
self.assertIsNotNone(self.bridge.get_issue("TP-9"))
def test_get_statuses(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "status.yaml")):
self.assertIsNotNone(self.bridge.get_statuses())
def test_get_projects(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "project.yaml")):
self.assertIsNotNone(self.bridge.get_projects())
def test_get_priorities(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "priorities.yaml")):
self.assertIsNotNone(self.bridge.get_priorities())
def test_get_transitions(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "transitions.yaml")):
self.assertIsNotNone(self.bridge.get_available_transitions("TP-9"))
def test_get_resolutions(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "resolutions.yaml")):
self.assertIsNotNone(self.bridge.get_resolutions())
def test_get_project_components(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "components.yaml")):
self.assertIsNotNone(self.bridge.get_components("TP"))
def test_get_issue_types(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "types.yaml")):
self.assertIsNotNone(self.bridge.get_issue_types())
def test_get_sub_task_issue_types(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "subtypes.yaml")):
self.assertIsNotNone(self.bridge.get_issue_types())
def test_get_filters(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "filters.yaml")):
self.assertIsNotNone(self.bridge.get_filters())
def test_search_free_text(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "search.yaml")):
self.assertTrue(
len(
self.bridge.search_issues("test jira-cli")
) == 1)
def test_search_jql(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "search-jql.yaml")):
self.assertTrue(
len(
self.bridge.search_issues_jql("summary~jira-cli")
) == 1)
def test_filter_fail(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "filter-search-fail.yaml")):
self.assertIsNotNone(
self.bridge.get_issues_by_filter("test-filter")
)
    def test_filter(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "filter-search.yaml")):
self.assertIsNotNone(
self.bridge.get_issues_by_filter("test filter", "blah")
)
def test_create_issue(self):
with jiravcr.use_cassette(os.path.join(self.vcr_directory, "create.yaml")):
self.assertIsNotNone(
self.bridge.create_issue("TP", summary='test-create-issue')
)
def test_create_child_issue(self):
with jiravcr.use_cassette(
os.path.join(self.vcr_directory, "childcreate.yaml")):
self.assertIsNotNone(
self.bridge.create_issue("TP", type='sub-task',
summary='test-create-issue',
parent='TP-10')
) | mit | -162,535,083,452,984,260 | -9,104,716,939,072,390,000 | 36.79 | 84 | 0.58973 | false |
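

# Hedged note (not part of the original module): with record_mode='once',
# each cassette is recorded on the first run and replayed on later runs, so
# the assertions above only touch a live JIRA during that first recording.
# Minimal standalone form of the same pattern (path illustrative):
#
#   with jiravcr.use_cassette("/tmp/issue.yaml"):
#       self.bridge.get_issue("TP-9")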
mindw/pip | pip/_vendor/distlib/version.py | 132 | 23711 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Implementation of a flexible versioning scheme providing support for PEP-440,
setuptools-compatible and semantic versioning.
"""
import logging
import re
from .compat import string_types
__all__ = ['NormalizedVersion', 'NormalizedMatcher',
'LegacyVersion', 'LegacyMatcher',
'SemanticVersion', 'SemanticMatcher',
'UnsupportedVersionError', 'get_scheme']
logger = logging.getLogger(__name__)
class UnsupportedVersionError(ValueError):
"""This is an unsupported version."""
pass
class Version(object):
def __init__(self, s):
self._string = s = s.strip()
self._parts = parts = self.parse(s)
assert isinstance(parts, tuple)
assert len(parts) > 0
def parse(self, s):
raise NotImplementedError('please implement in a subclass')
def _check_compatible(self, other):
if type(self) != type(other):
raise TypeError('cannot compare %r and %r' % (self, other))
def __eq__(self, other):
self._check_compatible(other)
return self._parts == other._parts
def __ne__(self, other):
return not self.__eq__(other)
def __lt__(self, other):
self._check_compatible(other)
return self._parts < other._parts
def __gt__(self, other):
return not (self.__lt__(other) or self.__eq__(other))
def __le__(self, other):
return self.__lt__(other) or self.__eq__(other)
def __ge__(self, other):
return self.__gt__(other) or self.__eq__(other)
# See http://docs.python.org/reference/datamodel#object.__hash__
def __hash__(self):
return hash(self._parts)
def __repr__(self):
return "%s('%s')" % (self.__class__.__name__, self._string)
def __str__(self):
return self._string
@property
def is_prerelease(self):
raise NotImplementedError('Please implement in subclasses.')
class Matcher(object):
version_class = None
dist_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?")
comp_re = re.compile(r'^(<=|>=|<|>|!=|={2,3}|~=)?\s*([^\s,]+)$')
num_re = re.compile(r'^\d+(\.\d+)*$')
# value is either a callable or the name of a method
_operators = {
'<': lambda v, c, p: v < c,
'>': lambda v, c, p: v > c,
'<=': lambda v, c, p: v == c or v < c,
'>=': lambda v, c, p: v == c or v > c,
'==': lambda v, c, p: v == c,
'===': lambda v, c, p: v == c,
# by default, compatible => >=.
'~=': lambda v, c, p: v == c or v > c,
'!=': lambda v, c, p: v != c,
}
def __init__(self, s):
if self.version_class is None:
raise ValueError('Please specify a version class')
self._string = s = s.strip()
m = self.dist_re.match(s)
if not m:
raise ValueError('Not valid: %r' % s)
groups = m.groups('')
self.name = groups[0].strip()
self.key = self.name.lower() # for case-insensitive comparisons
clist = []
if groups[2]:
constraints = [c.strip() for c in groups[2].split(',')]
for c in constraints:
m = self.comp_re.match(c)
if not m:
raise ValueError('Invalid %r in %r' % (c, s))
groups = m.groups()
op = groups[0] or '~='
s = groups[1]
if s.endswith('.*'):
if op not in ('==', '!='):
raise ValueError('\'.*\' not allowed for '
'%r constraints' % op)
# Could be a partial version (e.g. for '2.*') which
# won't parse as a version, so keep it as a string
vn, prefix = s[:-2], True
if not self.num_re.match(vn):
# Just to check that vn is a valid version
self.version_class(vn)
else:
# Should parse as a version, so we can create an
# instance for the comparison
vn, prefix = self.version_class(s), False
clist.append((op, vn, prefix))
self._parts = tuple(clist)
def match(self, version):
"""
Check if the provided version matches the constraints.
:param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
"""
if isinstance(version, string_types):
version = self.version_class(version)
for operator, constraint, prefix in self._parts:
f = self._operators.get(operator)
if isinstance(f, string_types):
f = getattr(self, f)
if not f:
msg = ('%r not implemented '
'for %s' % (operator, self.__class__.__name__))
raise NotImplementedError(msg)
if not f(version, constraint, prefix):
return False
return True
@property
def exact_version(self):
result = None
if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
result = self._parts[0][1]
return result
def _check_compatible(self, other):
if type(self) != type(other) or self.name != other.name:
raise TypeError('cannot compare %s and %s' % (self, other))
def __eq__(self, other):
self._check_compatible(other)
return self.key == other.key and self._parts == other._parts
def __ne__(self, other):
return not self.__eq__(other)
# See http://docs.python.org/reference/datamodel#object.__hash__
def __hash__(self):
return hash(self.key) + hash(self._parts)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self._string)
def __str__(self):
return self._string
PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
r'(\.(post)(\d+))?(\.(dev)(\d+))?'
r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$')
def _pep_440_key(s):
s = s.strip()
m = PEP440_VERSION_RE.match(s)
if not m:
raise UnsupportedVersionError('Not a valid version: %s' % s)
groups = m.groups()
nums = tuple(int(v) for v in groups[1].split('.'))
while len(nums) > 1 and nums[-1] == 0:
nums = nums[:-1]
if not groups[0]:
epoch = 0
else:
epoch = int(groups[0])
pre = groups[4:6]
post = groups[7:9]
dev = groups[10:12]
local = groups[13]
if pre == (None, None):
pre = ()
else:
pre = pre[0], int(pre[1])
if post == (None, None):
post = ()
else:
post = post[0], int(post[1])
if dev == (None, None):
dev = ()
else:
dev = dev[0], int(dev[1])
if local is None:
local = ()
else:
parts = []
for part in local.split('.'):
# to ensure that numeric compares as > lexicographic, avoid
# comparing them directly, but encode a tuple which ensures
# correct sorting
if part.isdigit():
part = (1, int(part))
else:
part = (0, part)
parts.append(part)
local = tuple(parts)
if not pre:
# either before pre-release, or final release and after
if not post and dev:
# before pre-release
pre = ('a', -1) # to sort before a0
else:
pre = ('z',) # to sort after all pre-releases
# now look at the state of post and dev.
if not post:
post = ('_',) # sort before 'a'
if not dev:
dev = ('final',)
#print('%s -> %s' % (s, m.groups()))
return epoch, nums, pre, post, dev, local
_normalized_key = _pep_440_key
class NormalizedVersion(Version):
"""A rational version.
Good:
1.2 # equivalent to "1.2.0"
1.2.0
1.2a1
1.2.3a2
1.2.3b1
1.2.3c1
1.2.3.4
TODO: fill this out
Bad:
        1            # minimum two numbers
1.2a # release level must have a release serial
1.2.3b
"""
def parse(self, s):
result = _normalized_key(s)
# _normalized_key loses trailing zeroes in the release
# clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
# However, PEP 440 prefix matching needs it: for example,
# (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
m = PEP440_VERSION_RE.match(s) # must succeed
groups = m.groups()
self._release_clause = tuple(int(v) for v in groups[1].split('.'))
return result
PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])
@property
def is_prerelease(self):
return any(t[0] in self.PREREL_TAGS for t in self._parts if t)
def _match_prefix(x, y):
x = str(x)
y = str(y)
if x == y:
return True
if not x.startswith(y):
return False
n = len(y)
return x[n] == '.'
class NormalizedMatcher(Matcher):
version_class = NormalizedVersion
# value is either a callable or the name of a method
_operators = {
'~=': '_match_compatible',
'<': '_match_lt',
'>': '_match_gt',
'<=': '_match_le',
'>=': '_match_ge',
'==': '_match_eq',
'===': '_match_arbitrary',
'!=': '_match_ne',
}
def _adjust_local(self, version, constraint, prefix):
if prefix:
strip_local = '+' not in constraint and version._parts[-1]
else:
# both constraint and version are
# NormalizedVersion instances.
# If constraint does not have a local component,
# ensure the version doesn't, either.
strip_local = not constraint._parts[-1] and version._parts[-1]
if strip_local:
s = version._string.split('+', 1)[0]
version = self.version_class(s)
return version, constraint
def _match_lt(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
if version >= constraint:
return False
release_clause = constraint._release_clause
pfx = '.'.join([str(i) for i in release_clause])
return not _match_prefix(version, pfx)
def _match_gt(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
if version <= constraint:
return False
release_clause = constraint._release_clause
pfx = '.'.join([str(i) for i in release_clause])
return not _match_prefix(version, pfx)
def _match_le(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
return version <= constraint
def _match_ge(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
return version >= constraint
def _match_eq(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
if not prefix:
result = (version == constraint)
else:
result = _match_prefix(version, constraint)
return result
def _match_arbitrary(self, version, constraint, prefix):
return str(version) == str(constraint)
def _match_ne(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
if not prefix:
result = (version != constraint)
else:
result = not _match_prefix(version, constraint)
return result
def _match_compatible(self, version, constraint, prefix):
version, constraint = self._adjust_local(version, constraint, prefix)
if version == constraint:
return True
if version < constraint:
return False
# if not prefix:
# return True
release_clause = constraint._release_clause
if len(release_clause) > 1:
release_clause = release_clause[:-1]
pfx = '.'.join([str(i) for i in release_clause])
return _match_prefix(version, pfx)
_REPLACEMENTS = (
(re.compile('[.+-]$'), ''), # remove trailing puncts
(re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start
(re.compile('^[.-]'), ''), # remove leading puncts
(re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses
(re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion)
(re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading v(ersion)
(re.compile('[.]{2,}'), '.'), # multiple runs of '.'
(re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha
(re.compile(r'\b(pre-alpha|prealpha)\b'),
'pre.alpha'), # standardise
(re.compile(r'\(beta\)$'), 'beta'), # remove parentheses
)
_SUFFIX_REPLACEMENTS = (
(re.compile('^[:~._+-]+'), ''), # remove leading puncts
(re.compile('[,*")([\]]'), ''), # remove unwanted chars
(re.compile('[~:+_ -]'), '.'), # replace illegal chars
(re.compile('[.]{2,}'), '.'), # multiple runs of '.'
(re.compile(r'\.$'), ''), # trailing '.'
)
_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
def _suggest_semantic_version(s):
"""
Try to suggest a semantic form for a version for which
_suggest_normalized_version couldn't come up with anything.
"""
result = s.strip().lower()
for pat, repl in _REPLACEMENTS:
result = pat.sub(repl, result)
if not result:
result = '0.0.0'
# Now look for numeric prefix, and separate it out from
# the rest.
#import pdb; pdb.set_trace()
m = _NUMERIC_PREFIX.match(result)
if not m:
prefix = '0.0.0'
suffix = result
else:
prefix = m.groups()[0].split('.')
prefix = [int(i) for i in prefix]
while len(prefix) < 3:
prefix.append(0)
if len(prefix) == 3:
suffix = result[m.end():]
else:
suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
prefix = prefix[:3]
prefix = '.'.join([str(i) for i in prefix])
suffix = suffix.strip()
if suffix:
#import pdb; pdb.set_trace()
# massage the suffix.
for pat, repl in _SUFFIX_REPLACEMENTS:
suffix = pat.sub(repl, suffix)
if not suffix:
result = prefix
else:
sep = '-' if 'dev' in suffix else '+'
result = prefix + sep + suffix
if not is_semver(result):
result = None
return result
def _suggest_normalized_version(s):
"""Suggest a normalized version close to the given version string.
If you have a version string that isn't rational (i.e. NormalizedVersion
doesn't like it) then you might be able to get an equivalent (or close)
rational version from this function.
This does a number of simple normalizations to the given string, based
on observation of versions currently in use on PyPI. Given a dump of
those version during PyCon 2009, 4287 of them:
- 2312 (53.93%) match NormalizedVersion without change
with the automatic suggestion
- 3474 (81.04%) match when using this suggestion method
@param s {str} An irrational version string.
@returns A rational version string, or None, if couldn't determine one.
"""
try:
_normalized_key(s)
return s # already rational
except UnsupportedVersionError:
pass
rs = s.lower()
# part of this could use maketrans
for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
('beta', 'b'), ('rc', 'c'), ('-final', ''),
('-pre', 'c'),
('-release', ''), ('.release', ''), ('-stable', ''),
('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
('final', '')):
rs = rs.replace(orig, repl)
# if something ends with dev or pre, we add a 0
rs = re.sub(r"pre$", r"pre0", rs)
rs = re.sub(r"dev$", r"dev0", rs)
# if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
# let's remove the dash or dot
rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
# 1.0-dev-r371 -> 1.0.dev371
# 0.1-dev-r79 -> 0.1.dev79
rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
# Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
# Clean: v0.3, v1.0
if rs.startswith('v'):
rs = rs[1:]
# Clean leading '0's on numbers.
#TODO: unintended side-effect on, e.g., "2003.05.09"
# PyPI stats: 77 (~2%) better
rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
# Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
# zero.
# PyPI stats: 245 (7.56%) better
rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
# the 'dev-rNNN' tag is a dev tag
rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
# clean the - when used as a pre delimiter
rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
# a terminal "dev" or "devel" can be changed into ".dev0"
rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
# a terminal "dev" can be changed into ".dev0"
rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
# a terminal "final" or "stable" can be removed
rs = re.sub(r"(final|stable)$", "", rs)
# The 'r' and the '-' tags are post release tags
# 0.4a1.r10 -> 0.4a1.post10
# 0.9.33-17222 -> 0.9.33.post17222
# 0.9.33-r17222 -> 0.9.33.post17222
rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
# Clean 'r' instead of 'dev' usage:
# 0.9.33+r17222 -> 0.9.33.dev17222
# 1.0dev123 -> 1.0.dev123
# 1.0.git123 -> 1.0.dev123
# 1.0.bzr123 -> 1.0.dev123
# 0.1a0dev.123 -> 0.1a0.dev123
# PyPI stats: ~150 (~4%) better
rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
# Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
# 0.2.pre1 -> 0.2c1
# 0.2-c1 -> 0.2c1
# 1.0preview123 -> 1.0c123
# PyPI stats: ~21 (0.62%) better
rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
# Tcl/Tk uses "px" for their post release markers
rs = re.sub(r"p(\d+)$", r".post\1", rs)
try:
_normalized_key(rs)
except UnsupportedVersionError:
rs = None
return rs
#
# Legacy version processing (distribute-compatible)
#
_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
_VERSION_REPLACE = {
'pre': 'c',
'preview': 'c',
'-': 'final-',
'rc': 'c',
'dev': '@',
'': None,
'.': None,
}
def _legacy_key(s):
def get_parts(s):
result = []
for p in _VERSION_PART.split(s.lower()):
p = _VERSION_REPLACE.get(p, p)
if p:
if '0' <= p[:1] <= '9':
p = p.zfill(8)
else:
p = '*' + p
result.append(p)
result.append('*final')
return result
result = []
for p in get_parts(s):
if p.startswith('*'):
if p < '*final':
while result and result[-1] == '*final-':
result.pop()
while result and result[-1] == '00000000':
result.pop()
result.append(p)
return tuple(result)
class LegacyVersion(Version):
def parse(self, s):
return _legacy_key(s)
@property
def is_prerelease(self):
result = False
for x in self._parts:
if (isinstance(x, string_types) and x.startswith('*') and
x < '*final'):
result = True
break
return result
class LegacyMatcher(Matcher):
version_class = LegacyVersion
_operators = dict(Matcher._operators)
_operators['~='] = '_match_compatible'
numeric_re = re.compile('^(\d+(\.\d+)*)')
def _match_compatible(self, version, constraint, prefix):
if version < constraint:
return False
m = self.numeric_re.match(str(constraint))
if not m:
logger.warning('Cannot compute compatible match for version %s '
' and constraint %s', version, constraint)
return True
s = m.groups()[0]
if '.' in s:
s = s.rsplit('.', 1)[0]
return _match_prefix(version, s)
#
# Semantic versioning
#
_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
def is_semver(s):
return _SEMVER_RE.match(s)
def _semantic_key(s):
def make_tuple(s, absent):
if s is None:
result = (absent,)
else:
parts = s[1:].split('.')
# We can't compare ints and strings on Python 3, so fudge it
# by zero-filling numeric values so simulate a numeric comparison
result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
return result
m = is_semver(s)
if not m:
raise UnsupportedVersionError(s)
groups = m.groups()
major, minor, patch = [int(i) for i in groups[:3]]
# choose the '|' and '*' so that versions sort correctly
pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
return (major, minor, patch), pre, build
class SemanticVersion(Version):
def parse(self, s):
return _semantic_key(s)
@property
def is_prerelease(self):
return self._parts[1][0] != '|'
class SemanticMatcher(Matcher):
version_class = SemanticVersion
class VersionScheme(object):
def __init__(self, key, matcher, suggester=None):
self.key = key
self.matcher = matcher
self.suggester = suggester
def is_valid_version(self, s):
try:
self.matcher.version_class(s)
result = True
except UnsupportedVersionError:
result = False
return result
def is_valid_matcher(self, s):
try:
self.matcher(s)
result = True
except UnsupportedVersionError:
result = False
return result
def is_valid_constraint_list(self, s):
"""
Used for processing some metadata fields
"""
return self.is_valid_matcher('dummy_name (%s)' % s)
def suggest(self, s):
if self.suggester is None:
result = None
else:
result = self.suggester(s)
return result
_SCHEMES = {
'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
_suggest_normalized_version),
'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s),
'semantic': VersionScheme(_semantic_key, SemanticMatcher,
_suggest_semantic_version),
}
_SCHEMES['default'] = _SCHEMES['normalized']
def get_scheme(name):
if name not in _SCHEMES:
raise ValueError('unknown scheme name: %r' % name)
return _SCHEMES[name]
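

# Hedged usage sketch (not part of the original module), exercising the
# public API defined above:
#
#   scheme = get_scheme('normalized')
#   v = scheme.matcher.version_class('1.2.3')        # a NormalizedVersion
#   m = scheme.matcher('requests (>=1.0, <2.0)')
#   assert m.match(v)
#   assert scheme.suggest('1.0-alpha') == '1.0a0'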
| mit | 395,654,797,843,582,460 | -1,428,662,373,047,039,200 | 30.955526 | 78 | 0.520644 | false |
ThiagoGarciaAlves/intellij-community | python/lib/Lib/site-packages/django/contrib/gis/gdal/__init__.py | 397 | 2173 | """
This module houses ctypes interfaces for GDAL objects. The following GDAL
objects are supported:
CoordTransform: Used for coordinate transformations from one spatial
reference system to another.
Driver: Wraps an OGR data source driver.
DataSource: Wrapper for the OGR data source object, supports
OGR-supported data sources.
Envelope: A ctypes structure for bounding boxes (GDAL library
not required).
OGRGeometry: Object for accessing OGR Geometry functionality.
OGRGeomType: A class for representing the different OGR Geometry
types (GDAL library not required).
SpatialReference: Represents OSR Spatial Reference objects.
The GDAL library will be imported from the system path using the default
library name for the current OS. The default library path may be overridden
by setting `GDAL_LIBRARY_PATH` in your settings with the path to the GDAL C
library on your system.
GDAL links to a large number of external libraries that consume RAM when
loaded. Thus, it may desirable to disable GDAL on systems with limited
RAM resources -- this may be accomplished by setting `GDAL_LIBRARY_PATH`
to a non-existant file location (e.g., `GDAL_LIBRARY_PATH='/null/path'`;
setting to None/False/'' will not work as a string must be given).
"""
# Attempting to import objects that depend on the GDAL library. The
# HAS_GDAL flag will be set to True if the library is present on
# the system.
try:
    from django.contrib.gis.gdal.driver import Driver
    from django.contrib.gis.gdal.datasource import DataSource
    from django.contrib.gis.gdal.libgdal import gdal_version, gdal_full_version, gdal_release_date, GEOJSON, GDAL_VERSION
    from django.contrib.gis.gdal.srs import SpatialReference, CoordTransform
    from django.contrib.gis.gdal.geometries import OGRGeometry
    HAS_GDAL = True
except:
    HAS_GDAL, GEOJSON = False, False

try:
    from django.contrib.gis.gdal.envelope import Envelope
except ImportError:
    # No ctypes, but don't raise an exception.
    pass
from django.contrib.gis.gdal.error import check_err, OGRException, OGRIndexError, SRSException
from django.contrib.gis.gdal.geomtype import OGRGeomType
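
# Hedged usage sketch (not part of the original module), valid only when the
# GDAL C library was found (HAS_GDAL is True); the shapefile path is made up:
#
#   from django.contrib.gis.gdal import HAS_GDAL, DataSource
#   if HAS_GDAL:
#       ds = DataSource('/data/cities.shp')
#       print(ds.layer_count, ds[0].geom_type)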
| apache-2.0 | -1,154,700,773,461,938,700 | -220,590,974,310,540,640 | 39.240741 | 121 | 0.773585 | false |
fedorpatlin/ansible | lib/ansible/modules/cloud/vmware/vmware_vswitch.py | 56 | 6915 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Joseph Callen <jcallen () csc.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: vmware_vswitch
short_description: Add a VMware Standard Switch to an ESXi host
description:
- Add a VMware Standard Switch to an ESXi host
version_added: 2.0
author: "Joseph Callen (@jcpowermac), Russell Teague (@mtnbikenc)"
notes:
- Tested on vSphere 5.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
switch_name:
description:
- vSwitch name to add
required: True
nic_name:
description:
- vmnic name to attach to vswitch
required: True
number_of_ports:
description:
- Number of port to configure on vswitch
default: 128
required: False
mtu:
description:
- MTU to configure on vswitch
required: False
state:
description:
- Add or remove the switch
default: 'present'
choices:
- 'present'
- 'absent'
required: False
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
# Example from Ansible playbook
- name: Add a VMware vSwitch
local_action:
module: vmware_vswitch
hostname: esxi_hostname
username: esxi_username
password: esxi_password
switch_name: vswitch_name
nic_name: vmnic_name
mtu: 9000
'''
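# A removal counterpart to the playbook snippet above (illustrative; mirrors
# the documented state choices):
#
#   - name: Remove a VMware vSwitch
#     local_action:
#       module: vmware_vswitch
#       hostname: esxi_hostname
#       username: esxi_username
#       password: esxi_password
#       switch_name: vswitch_name
#       nic_name: vmnic_name
#       state: absent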
try:
from pyVmomi import vim, vmodl
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
def find_vswitch_by_name(host, vswitch_name):
for vss in host.config.network.vswitch:
if vss.name == vswitch_name:
return vss
return None
class VMwareHostVirtualSwitch(object):
def __init__(self, module):
self.host_system = None
self.content = None
self.vss = None
self.module = module
self.switch_name = module.params['switch_name']
self.number_of_ports = module.params['number_of_ports']
self.nic_name = module.params['nic_name']
self.mtu = module.params['mtu']
self.state = module.params['state']
self.content = connect_to_api(self.module)
def process_state(self):
try:
vswitch_states = {
'absent': {
'present': self.state_destroy_vswitch,
'absent': self.state_exit_unchanged,
},
'present': {
'update': self.state_update_vswitch,
'present': self.state_exit_unchanged,
'absent': self.state_create_vswitch,
}
}
vswitch_states[self.state][self.check_vswitch_configuration()]()
except vmodl.RuntimeFault as runtime_fault:
self.module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
self.module.fail_json(msg=method_fault.msg)
except Exception as e:
self.module.fail_json(msg=str(e))
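    # Worked example of the dispatch above (illustrative): with state
    # 'present' and no matching vswitch on the host,
    # check_vswitch_configuration() returns 'absent', so
    # vswitch_states['present']['absent'] resolves to state_create_vswitch.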
# Source from
# https://github.com/rreubenur/pyvmomi-community-samples/blob/patch-1/samples/create_vswitch.py
def state_create_vswitch(self):
vss_spec = vim.host.VirtualSwitch.Specification()
vss_spec.numPorts = self.number_of_ports
vss_spec.mtu = self.mtu
vss_spec.bridge = vim.host.VirtualSwitch.BondBridge(nicDevice=[self.nic_name])
self.host_system.configManager.networkSystem.AddVirtualSwitch(vswitchName=self.switch_name, spec=vss_spec)
self.module.exit_json(changed=True)
def state_exit_unchanged(self):
self.module.exit_json(changed=False)
def state_destroy_vswitch(self):
config = vim.host.NetworkConfig()
for portgroup in self.host_system.configManager.networkSystem.networkInfo.portgroup:
if portgroup.spec.vswitchName == self.vss.name:
portgroup_config = vim.host.PortGroup.Config()
portgroup_config.changeOperation = "remove"
portgroup_config.spec = vim.host.PortGroup.Specification()
                portgroup_config.spec.name = portgroup.spec.name
portgroup_config.spec.vlanId = portgroup.spec.vlanId
portgroup_config.spec.vswitchName = portgroup.spec.vswitchName
portgroup_config.spec.policy = vim.host.NetworkPolicy()
config.portgroup.append(portgroup_config)
self.host_system.configManager.networkSystem.UpdateNetworkConfig(config, "modify")
self.host_system.configManager.networkSystem.RemoveVirtualSwitch(self.vss.name)
self.module.exit_json(changed=True)
def state_update_vswitch(self):
self.module.exit_json(changed=False, msg="Currently not implemented.")
def check_vswitch_configuration(self):
host = get_all_objs(self.content, [vim.HostSystem])
if not host:
self.module.fail_json(msg="Unable to find host")
self.host_system = host.keys()[0]
self.vss = find_vswitch_by_name(self.host_system, self.switch_name)
if self.vss is None:
return 'absent'
else:
return 'present'
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(dict(switch_name=dict(required=True, type='str'),
nic_name=dict(required=True, type='str'),
number_of_ports=dict(required=False, type='int', default=128),
mtu=dict(required=False, type='int', default=1500),
state=dict(default='present', choices=['present', 'absent'], type='str')))
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
if not HAS_PYVMOMI:
module.fail_json(msg='pyvmomi is required for this module')
host_virtual_switch = VMwareHostVirtualSwitch(module)
host_virtual_switch.process_state()
from ansible.module_utils.vmware import *
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 | 1,227,522,353,073,271,000 | 5,084,135,295,821,142,000 | 32.897059 | 114 | 0.627043 | false |
RichardLitt/wyrd-django-dev | django/db/models/fields/related.py | 2 | 62048 | from operator import attrgetter
from django.db import connection, router
from django.db.backends import util
from django.db.models import signals, get_model
from django.db.models.fields import (AutoField, Field, IntegerField,
PositiveIntegerField, PositiveSmallIntegerField, FieldDoesNotExist)
from django.db.models.related import RelatedObject
from django.db.models.query import QuerySet
from django.db.models.query_utils import QueryWrapper
from django.db.models.deletion import CASCADE
from django.utils.encoding import smart_text
from django.utils import six
from django.utils.translation import ugettext_lazy as _, string_concat
from django.utils.functional import curry, cached_property
from django.core import exceptions
from django import forms
RECURSIVE_RELATIONSHIP_CONSTANT = 'self'
pending_lookups = {}
def add_lazy_relation(cls, field, relation, operation):
"""
Adds a lookup on ``cls`` when a related field is defined using a string,
i.e.::
class MyModel(Model):
fk = ForeignKey("AnotherModel")
This string can be:
* RECURSIVE_RELATIONSHIP_CONSTANT (i.e. "self") to indicate a recursive
relation.
* The name of a model (i.e "AnotherModel") to indicate another model in
the same app.
* An app-label and model name (i.e. "someapp.AnotherModel") to indicate
another model in a different app.
If the other model hasn't yet been loaded -- almost a given if you're using
lazy relationships -- then the relation won't be set up until the
class_prepared signal fires at the end of model initialization.
operation is the work that must be performed once the relation can be resolved.
"""
# Check for recursive relations
if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
app_label = cls._meta.app_label
model_name = cls.__name__
else:
# Look for an "app.Model" relation
try:
app_label, model_name = relation.split(".")
except ValueError:
# If we can't split, assume a model in current app
app_label = cls._meta.app_label
model_name = relation
except AttributeError:
# If it doesn't have a split it's actually a model class
app_label = relation._meta.app_label
model_name = relation._meta.object_name
# Try to look up the related model, and if it's already loaded resolve the
# string right away. If get_model returns None, it means that the related
# model isn't loaded yet, so we need to pend the relation until the class
# is prepared.
model = get_model(app_label, model_name,
seed_cache=False, only_installed=False)
if model:
operation(field, model, cls)
else:
key = (app_label, model_name)
value = (cls, field, operation)
pending_lookups.setdefault(key, []).append(value)
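# Illustrative sketch (not part of Django itself) of the three string forms
# handled above; Author and otherapp.Publisher are hypothetical models:
#
#     class Book(models.Model):
#         parent = models.ForeignKey('self')                   # recursive
#         author = models.ForeignKey('Author')                 # same app
#         publisher = models.ForeignKey('otherapp.Publisher')  # app.Model
#
# Any reference whose target model is not loaded yet is parked in
# pending_lookups and resolved by do_pending_lookups() below.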
def do_pending_lookups(sender, **kwargs):
"""
Handle any pending relations to the sending model. Sent from class_prepared.
"""
key = (sender._meta.app_label, sender.__name__)
for cls, field, operation in pending_lookups.pop(key, []):
operation(field, sender, cls)
signals.class_prepared.connect(do_pending_lookups)
#HACK
class RelatedField(object):
def contribute_to_class(self, cls, name):
sup = super(RelatedField, self)
# Store the opts for related_query_name()
self.opts = cls._meta
if hasattr(sup, 'contribute_to_class'):
sup.contribute_to_class(cls, name)
if not cls._meta.abstract and self.rel.related_name:
self.rel.related_name = self.rel.related_name % {
'class': cls.__name__.lower(),
'app_label': cls._meta.app_label.lower(),
}
other = self.rel.to
if isinstance(other, six.string_types) or other._meta.pk is None:
def resolve_related_class(field, model, cls):
field.rel.to = model
field.do_related_class(model, cls)
add_lazy_relation(cls, self, other, resolve_related_class)
else:
self.do_related_class(other, cls)
def set_attributes_from_rel(self):
self.name = self.name or (self.rel.to._meta.object_name.lower() + '_' + self.rel.to._meta.pk.name)
if self.verbose_name is None:
self.verbose_name = self.rel.to._meta.verbose_name
self.rel.field_name = self.rel.field_name or self.rel.to._meta.pk.name
def do_related_class(self, other, cls):
self.set_attributes_from_rel()
self.related = RelatedObject(other, cls, self)
if not cls._meta.abstract:
self.contribute_to_related_class(other, self.related)
def get_prep_lookup(self, lookup_type, value):
if hasattr(value, 'prepare'):
return value.prepare()
if hasattr(value, '_prepare'):
return value._prepare()
# FIXME: lt and gt are explicitly allowed to make
# get_(next/prev)_by_date work; other lookups are not allowed since that
# gets messy pretty quick. This is a good candidate for some refactoring
# in the future.
if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
return self._pk_trace(value, 'get_prep_lookup', lookup_type)
if lookup_type in ('range', 'in'):
return [self._pk_trace(v, 'get_prep_lookup', lookup_type) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
if not prepared:
value = self.get_prep_lookup(lookup_type, value)
if hasattr(value, 'get_compiler'):
value = value.get_compiler(connection=connection)
if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'):
# If the value has a relabel_aliases method, it will need to
# be invoked before the final SQL is evaluated
if hasattr(value, 'relabel_aliases'):
return value
if hasattr(value, 'as_sql'):
sql, params = value.as_sql()
else:
sql, params = value._as_sql(connection=connection)
return QueryWrapper(('(%s)' % sql), params)
# FIXME: lt and gt are explicitly allowed to make
# get_(next/prev)_by_date work; other lookups are not allowed since that
# gets messy pretty quick. This is a good candidate for some refactoring
# in the future.
if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
return [self._pk_trace(value, 'get_db_prep_lookup', lookup_type,
connection=connection, prepared=prepared)]
if lookup_type in ('range', 'in'):
return [self._pk_trace(v, 'get_db_prep_lookup', lookup_type,
connection=connection, prepared=prepared)
for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
def _pk_trace(self, value, prep_func, lookup_type, **kwargs):
# Value may be a primary key, or an object held in a relation.
# If it is an object, then we need to get the primary key value for
# that object. In certain conditions (especially one-to-one relations),
# the primary key may itself be an object - so we need to keep drilling
# down until we hit a value that can be used for a comparison.
v = value
# In the case of an FK to 'self', this check allows to_field to be used
# for both forwards and reverse lookups across the FK. (For normal FKs,
# it's only relevant for forward lookups).
if isinstance(v, self.rel.to):
field_name = getattr(self.rel, "field_name", None)
else:
field_name = None
try:
while True:
if field_name is None:
field_name = v._meta.pk.name
v = getattr(v, field_name)
field_name = None
except AttributeError:
pass
except exceptions.ObjectDoesNotExist:
v = None
field = self
while field.rel:
if hasattr(field.rel, 'field_name'):
field = field.rel.to._meta.get_field(field.rel.field_name)
else:
field = field.rel.to._meta.pk
if lookup_type in ('range', 'in'):
v = [v]
v = getattr(field, prep_func)(lookup_type, v, **kwargs)
if isinstance(v, list):
v = v[0]
return v
def related_query_name(self):
# This method defines the name that can be used to identify this
# related object in a table-spanning query. It uses the lower-cased
# object_name by default, but this can be overridden with the
# "related_name" option.
return self.rel.related_name or self.opts.object_name.lower()
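# Illustrative sketch: for a hypothetical Choice model declaring
# "poll = models.ForeignKey(Poll)", related_query_name() defaults to 'choice',
# enabling table-spanning filters such as:
#
#     Poll.objects.filter(choice__votes__gt=100)   # field names hypothetical
#
# Declaring the FK with related_name='answers' would change the lookup to
# Poll.objects.filter(answers__votes__gt=100).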
class SingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class pointed to by a related field.
# In the example "place.restaurant", the restaurant attribute is a
# SingleRelatedObjectDescriptor instance.
def __init__(self, related):
self.related = related
self.cache_name = related.get_cache_name()
def is_cached(self, instance):
return hasattr(instance, self.cache_name)
def get_query_set(self, **db_hints):
db = router.db_for_read(self.related.model, **db_hints)
return self.related.model._base_manager.using(db)
def get_prefetch_query_set(self, instances):
rel_obj_attr = attrgetter(self.related.field.attname)
instance_attr = lambda obj: obj._get_pk_val()
instances_dict = dict((instance_attr(inst), inst) for inst in instances)
params = {'%s__pk__in' % self.related.field.name: list(instances_dict)}
qs = self.get_query_set(instance=instances[0]).filter(**params)
# Since we're going to assign directly in the cache,
# we must manage the reverse relation cache manually.
rel_obj_cache_name = self.related.field.get_cache_name()
for rel_obj in qs:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_obj_cache_name, instance)
return qs, rel_obj_attr, instance_attr, True, self.cache_name
def __get__(self, instance, instance_type=None):
if instance is None:
return self
try:
rel_obj = getattr(instance, self.cache_name)
except AttributeError:
related_pk = instance._get_pk_val()
if related_pk is None:
rel_obj = None
else:
params = {'%s__pk' % self.related.field.name: related_pk}
try:
rel_obj = self.get_query_set(instance=instance).get(**params)
except self.related.model.DoesNotExist:
rel_obj = None
else:
setattr(rel_obj, self.related.field.get_cache_name(), instance)
setattr(instance, self.cache_name, rel_obj)
if rel_obj is None:
raise self.related.model.DoesNotExist
else:
return rel_obj
def __set__(self, instance, value):
if instance is None:
raise AttributeError("%s must be accessed via instance" % self.related.opts.object_name)
# The similarity of the code below to the code in
# ReverseSingleRelatedObjectDescriptor is annoying, but there's a bunch
# of small differences that would make a common base class convoluted.
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
        if value is None and not self.related.field.null:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.related.get_accessor_name()))
elif value is not None and not isinstance(value, self.related.model):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.related.get_accessor_name(), self.related.opts.object_name))
elif value is not None:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
(value, instance._state.db, value._state.db))
related_pk = getattr(instance, self.related.field.rel.get_related_field().attname)
if related_pk is None:
raise ValueError('Cannot assign "%r": "%s" instance isn\'t saved in the database.' %
(value, instance._meta.object_name))
# Set the value of the related field to the value of the related object's related field
setattr(value, self.related.field.attname, related_pk)
# Since we already know what the related object is, seed the related
# object caches now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.cache_name, value)
setattr(value, self.related.field.get_cache_name(), instance)
class ReverseSingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class that defines the related field.
# In the example "choice.poll", the poll attribute is a
# ReverseSingleRelatedObjectDescriptor instance.
def __init__(self, field_with_rel):
self.field = field_with_rel
self.cache_name = self.field.get_cache_name()
def is_cached(self, instance):
return hasattr(instance, self.cache_name)
def get_query_set(self, **db_hints):
db = router.db_for_read(self.field.rel.to, **db_hints)
rel_mgr = self.field.rel.to._default_manager
# If the related manager indicates that it should be used for
# related fields, respect that.
if getattr(rel_mgr, 'use_for_related_fields', False):
return rel_mgr.using(db)
else:
return QuerySet(self.field.rel.to).using(db)
def get_prefetch_query_set(self, instances):
other_field = self.field.rel.get_related_field()
rel_obj_attr = attrgetter(other_field.attname)
instance_attr = attrgetter(self.field.attname)
instances_dict = dict((instance_attr(inst), inst) for inst in instances)
if other_field.rel:
params = {'%s__pk__in' % self.field.rel.field_name: list(instances_dict)}
else:
params = {'%s__in' % self.field.rel.field_name: list(instances_dict)}
qs = self.get_query_set(instance=instances[0]).filter(**params)
# Since we're going to assign directly in the cache,
# we must manage the reverse relation cache manually.
if not self.field.rel.multiple:
rel_obj_cache_name = self.field.related.get_cache_name()
for rel_obj in qs:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_obj_cache_name, instance)
return qs, rel_obj_attr, instance_attr, True, self.cache_name
def __get__(self, instance, instance_type=None):
if instance is None:
return self
try:
rel_obj = getattr(instance, self.cache_name)
except AttributeError:
val = getattr(instance, self.field.attname)
if val is None:
rel_obj = None
else:
other_field = self.field.rel.get_related_field()
if other_field.rel:
params = {'%s__%s' % (self.field.rel.field_name, other_field.rel.field_name): val}
else:
params = {'%s__exact' % self.field.rel.field_name: val}
qs = self.get_query_set(instance=instance)
# Assuming the database enforces foreign keys, this won't fail.
rel_obj = qs.get(**params)
if not self.field.rel.multiple:
setattr(rel_obj, self.field.related.get_cache_name(), instance)
setattr(instance, self.cache_name, rel_obj)
if rel_obj is None and not self.field.null:
raise self.field.rel.to.DoesNotExist
else:
return rel_obj
def __set__(self, instance, value):
if instance is None:
raise AttributeError("%s must be accessed via instance" % self.field.name)
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
        if value is None and not self.field.null:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.field.name))
elif value is not None and not isinstance(value, self.field.rel.to):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.field.name, self.field.rel.to._meta.object_name))
elif value is not None:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
(value, instance._state.db, value._state.db))
# If we're setting the value of a OneToOneField to None, we need to clear
# out the cache on any old related object. Otherwise, deleting the
# previously-related object will also cause this object to be deleted,
# which is wrong.
if value is None:
# Look up the previously-related object, which may still be available
# since we've not yet cleared out the related field.
# Use the cache directly, instead of the accessor; if we haven't
# populated the cache, then we don't care - we're only accessing
# the object to invalidate the accessor cache, so there's no
# need to populate the cache just to expire it again.
related = getattr(instance, self.cache_name, None)
# If we've got an old related object, we need to clear out its
# cache. This cache also might not exist if the related object
# hasn't been accessed yet.
if related is not None:
setattr(related, self.field.related.get_cache_name(), None)
# Set the value of the related field
try:
val = getattr(value, self.field.rel.get_related_field().attname)
except AttributeError:
val = None
setattr(instance, self.field.attname, val)
# Since we already know what the related object is, seed the related
# object caches now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.cache_name, value)
if value is not None and not self.field.rel.multiple:
setattr(value, self.field.related.get_cache_name(), instance)
class ForeignRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ForeignKey pointed at them by
# some other model. In the example "poll.choice_set", the choice_set
# attribute is a ForeignRelatedObjectsDescriptor instance.
def __init__(self, related):
self.related = related # RelatedObject instance
def __get__(self, instance, instance_type=None):
if instance is None:
return self
return self.related_manager_cls(instance)
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
manager = self.__get__(instance)
# If the foreign key can support nulls, then completely clear the related set.
# Otherwise, just move the named objects into the set.
if self.related.field.null:
manager.clear()
manager.add(*value)
@cached_property
def related_manager_cls(self):
# Dynamically create a class that subclasses the related model's default
# manager.
superclass = self.related.model._default_manager.__class__
rel_field = self.related.field
rel_model = self.related.model
attname = rel_field.rel.get_related_field().attname
class RelatedManager(superclass):
def __init__(self, instance):
super(RelatedManager, self).__init__()
self.instance = instance
self.core_filters = {
'%s__%s' % (rel_field.name, attname): getattr(instance, attname)
}
self.model = rel_model
def get_query_set(self):
try:
return self.instance._prefetched_objects_cache[rel_field.related_query_name()]
except (AttributeError, KeyError):
db = self._db or router.db_for_read(self.model, instance=self.instance)
qs = super(RelatedManager, self).get_query_set().using(db).filter(**self.core_filters)
qs._known_related_object = (rel_field.name, self.instance)
return qs
def get_prefetch_query_set(self, instances):
rel_obj_attr = attrgetter(rel_field.attname)
instance_attr = attrgetter(attname)
instances_dict = dict((instance_attr(inst), inst) for inst in instances)
db = self._db or router.db_for_read(self.model, instance=instances[0])
query = {'%s__%s__in' % (rel_field.name, attname): list(instances_dict)}
qs = super(RelatedManager, self).get_query_set().using(db).filter(**query)
# Since we just bypassed this class' get_query_set(), we must manage
# the reverse relation manually.
for rel_obj in qs:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_field.name, instance)
cache_name = rel_field.related_query_name()
return qs, rel_obj_attr, instance_attr, False, cache_name
def add(self, *objs):
for obj in objs:
if not isinstance(obj, self.model):
raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj))
setattr(obj, rel_field.name, self.instance)
obj.save()
add.alters_data = True
def create(self, **kwargs):
kwargs[rel_field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).create(**kwargs)
create.alters_data = True
def get_or_create(self, **kwargs):
# Update kwargs with the related object that this
# ForeignRelatedObjectsDescriptor knows about.
kwargs[rel_field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).get_or_create(**kwargs)
get_or_create.alters_data = True
# remove() and clear() are only provided if the ForeignKey can have a value of null.
if rel_field.null:
def remove(self, *objs):
val = getattr(self.instance, attname)
for obj in objs:
# Is obj actually part of this descriptor set?
if getattr(obj, rel_field.attname) == val:
setattr(obj, rel_field.name, None)
obj.save()
else:
raise rel_field.rel.to.DoesNotExist("%r is not related to %r." % (obj, self.instance))
remove.alters_data = True
def clear(self):
self.update(**{rel_field.name: None})
clear.alters_data = True
return RelatedManager
def create_many_related_manager(superclass, rel):
"""Creates a manager that subclasses 'superclass' (which is a Manager)
and adds behavior for many-to-many related objects."""
class ManyRelatedManager(superclass):
def __init__(self, model=None, query_field_name=None, instance=None, symmetrical=None,
source_field_name=None, target_field_name=None, reverse=False,
through=None, prefetch_cache_name=None):
super(ManyRelatedManager, self).__init__()
self.model = model
self.query_field_name = query_field_name
self.core_filters = {'%s__pk' % query_field_name: instance._get_pk_val()}
self.instance = instance
self.symmetrical = symmetrical
self.source_field_name = source_field_name
self.target_field_name = target_field_name
self.reverse = reverse
self.through = through
self.prefetch_cache_name = prefetch_cache_name
self._pk_val = self.instance.pk
if self._pk_val is None:
raise ValueError("%r instance needs to have a primary key value before a many-to-many relationship can be used." % instance.__class__.__name__)
def get_query_set(self):
try:
return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
except (AttributeError, KeyError):
db = self._db or router.db_for_read(self.instance.__class__, instance=self.instance)
return super(ManyRelatedManager, self).get_query_set().using(db)._next_is_sticky().filter(**self.core_filters)
def get_prefetch_query_set(self, instances):
instance = instances[0]
from django.db import connections
db = self._db or router.db_for_read(instance.__class__, instance=instance)
query = {'%s__pk__in' % self.query_field_name:
set(obj._get_pk_val() for obj in instances)}
qs = super(ManyRelatedManager, self).get_query_set().using(db)._next_is_sticky().filter(**query)
# M2M: need to annotate the query in order to get the primary model
# that the secondary model was actually related to. We know that
# there will already be a join on the join table, so we can just add
# the select.
# For non-autocreated 'through' models, can't assume we are
# dealing with PK values.
fk = self.through._meta.get_field(self.source_field_name)
source_col = fk.column
join_table = self.through._meta.db_table
connection = connections[db]
qn = connection.ops.quote_name
qs = qs.extra(select={'_prefetch_related_val':
'%s.%s' % (qn(join_table), qn(source_col))})
select_attname = fk.rel.get_related_field().get_attname()
return (qs,
attrgetter('_prefetch_related_val'),
attrgetter(select_attname),
False,
self.prefetch_cache_name)
# If the ManyToMany relation has an intermediary model,
# the add and remove methods do not exist.
if rel.through._meta.auto_created:
def add(self, *objs):
self._add_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
if self.symmetrical:
self._add_items(self.target_field_name, self.source_field_name, *objs)
add.alters_data = True
def remove(self, *objs):
self._remove_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, remove the mirror entry in the m2m table
if self.symmetrical:
self._remove_items(self.target_field_name, self.source_field_name, *objs)
remove.alters_data = True
def clear(self):
self._clear_items(self.source_field_name)
# If this is a symmetrical m2m relation to self, clear the mirror entry in the m2m table
if self.symmetrical:
self._clear_items(self.target_field_name)
clear.alters_data = True
def create(self, **kwargs):
# This check needs to be done here, since we can't later remove this
# from the method lookup table, as we do with add and remove.
if not self.through._meta.auto_created:
opts = self.through._meta
raise AttributeError("Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
db = router.db_for_write(self.instance.__class__, instance=self.instance)
new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
self.add(new_obj)
return new_obj
create.alters_data = True
def get_or_create(self, **kwargs):
db = router.db_for_write(self.instance.__class__, instance=self.instance)
obj, created = \
super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs)
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
self.add(obj)
return obj, created
get_or_create.alters_data = True
def _add_items(self, source_field_name, target_field_name, *objs):
# source_field_name: the PK fieldname in join table for the source object
# target_field_name: the PK fieldname in join table for the target object
# *objs - objects to add. Either object instances, or primary keys of object instances.
# If there aren't any objects, there is nothing to do.
from django.db.models import Model
if objs:
new_ids = set()
for obj in objs:
if isinstance(obj, self.model):
if not router.allow_relation(obj, self.instance):
raise ValueError('Cannot add "%r": instance is on database "%s", value is on database "%s"' %
(obj, self.instance._state.db, obj._state.db))
new_ids.add(obj.pk)
elif isinstance(obj, Model):
raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj))
else:
new_ids.add(obj)
db = router.db_for_write(self.through, instance=self.instance)
vals = self.through._default_manager.using(db).values_list(target_field_name, flat=True)
vals = vals.filter(**{
source_field_name: self._pk_val,
'%s__in' % target_field_name: new_ids,
})
new_ids = new_ids - set(vals)
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action='pre_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
# Add the ones that aren't there already
self.through._default_manager.using(db).bulk_create([
self.through(**{
'%s_id' % source_field_name: self._pk_val,
'%s_id' % target_field_name: obj_id,
})
for obj_id in new_ids
])
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action='post_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
def _remove_items(self, source_field_name, target_field_name, *objs):
# source_field_name: the PK colname in join table for the source object
# target_field_name: the PK colname in join table for the target object
# *objs - objects to remove
# If there aren't any objects, there is nothing to do.
if objs:
# Check that all the objects are of the right type
old_ids = set()
for obj in objs:
if isinstance(obj, self.model):
old_ids.add(obj.pk)
else:
old_ids.add(obj)
# Work out what DB we're operating on
db = router.db_for_write(self.through, instance=self.instance)
# Send a signal to the other end if need be.
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are deleting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="pre_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
# Remove the specified objects from the join table
self.through._default_manager.using(db).filter(**{
source_field_name: self._pk_val,
'%s__in' % target_field_name: old_ids
}).delete()
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are deleting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="post_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
def _clear_items(self, source_field_name):
db = router.db_for_write(self.through, instance=self.instance)
# source_field_name: the PK colname in join table for the source object
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are clearing the
# duplicate data rows for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="pre_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
self.through._default_manager.using(db).filter(**{
source_field_name: self._pk_val
}).delete()
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are clearing the
# duplicate data rows for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="post_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
return ManyRelatedManager
class ManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField pointed at them by
# some other model (rather than having a ManyToManyField themselves).
# In the example "publication.article_set", the article_set attribute is a
# ManyRelatedObjectsDescriptor instance.
def __init__(self, related):
self.related = related # RelatedObject instance
@cached_property
def related_manager_cls(self):
# Dynamically create a class that subclasses the related
# model's default manager.
return create_many_related_manager(
self.related.model._default_manager.__class__,
self.related.field.rel
)
def __get__(self, instance, instance_type=None):
if instance is None:
return self
rel_model = self.related.model
manager = self.related_manager_cls(
model=rel_model,
query_field_name=self.related.field.name,
prefetch_cache_name=self.related.field.related_query_name(),
instance=instance,
symmetrical=False,
source_field_name=self.related.field.m2m_reverse_field_name(),
target_field_name=self.related.field.m2m_field_name(),
reverse=True,
through=self.related.field.rel.through,
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
if not self.related.field.rel.through._meta.auto_created:
opts = self.related.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ReverseManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField defined in their
# model (rather than having another model pointed *at* them).
# In the example "article.publications", the publications attribute is a
# ReverseManyRelatedObjectsDescriptor instance.
def __init__(self, m2m_field):
self.field = m2m_field
@property
def through(self):
# through is provided so that you have easy access to the through
# model (Book.authors.through) for inlines, etc. This is done as
# a property to ensure that the fully resolved value is returned.
return self.field.rel.through
@cached_property
def related_manager_cls(self):
# Dynamically create a class that subclasses the related model's
# default manager.
return create_many_related_manager(
self.field.rel.to._default_manager.__class__,
self.field.rel
)
def __get__(self, instance, instance_type=None):
if instance is None:
return self
manager = self.related_manager_cls(
model=self.field.rel.to,
query_field_name=self.field.related_query_name(),
prefetch_cache_name=self.field.name,
instance=instance,
symmetrical=self.field.rel.symmetrical,
source_field_name=self.field.m2m_field_name(),
target_field_name=self.field.m2m_reverse_field_name(),
reverse=False,
through=self.field.rel.through,
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
if not self.field.rel.through._meta.auto_created:
opts = self.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ManyToOneRel(object):
def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
parent_link=False, on_delete=None):
try:
to._meta
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT
self.to, self.field_name = to, field_name
self.related_name = related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.multiple = True
self.parent_link = parent_link
self.on_delete = on_delete
def is_hidden(self):
"Should the related object be hidden?"
return self.related_name and self.related_name[-1] == '+'
def get_related_field(self):
"""
Returns the Field in the 'to' object to which this relationship is
tied.
"""
data = self.to._meta.get_field_by_name(self.field_name)
if not data[2]:
raise FieldDoesNotExist("No related field named '%s'" %
self.field_name)
return data[0]
class OneToOneRel(ManyToOneRel):
def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
parent_link=False, on_delete=None):
super(OneToOneRel, self).__init__(to, field_name,
related_name=related_name, limit_choices_to=limit_choices_to,
parent_link=parent_link, on_delete=on_delete
)
self.multiple = False
class ManyToManyRel(object):
def __init__(self, to, related_name=None, limit_choices_to=None,
symmetrical=True, through=None):
self.to = to
self.related_name = related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.symmetrical = symmetrical
self.multiple = True
self.through = through
def is_hidden(self):
"Should the related object be hidden?"
return self.related_name and self.related_name[-1] == '+'
def get_related_field(self):
"""
        Returns the field in the 'to' object to which this relationship is tied
(this is always the primary key on the target model). Provided for
symmetry with ManyToOneRel.
"""
return self.to._meta.pk
class ForeignKey(RelatedField, Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('Model %(model)s with pk %(pk)r does not exist.')
}
description = _("Foreign Key (type determined by related field)")
def __init__(self, to, to_field=None, rel_class=ManyToOneRel, **kwargs):
try:
to_name = to._meta.object_name.lower()
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
else:
assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
# For backwards compatibility purposes, we need to *try* and set
# the to_field during FK construction. It won't be guaranteed to
# be correct until contribute_to_class is called. Refs #12190.
to_field = to_field or (to._meta.pk and to._meta.pk.name)
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
if 'db_index' not in kwargs:
kwargs['db_index'] = True
kwargs['rel'] = rel_class(to, to_field,
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
parent_link=kwargs.pop('parent_link', False),
on_delete=kwargs.pop('on_delete', CASCADE),
)
Field.__init__(self, **kwargs)
def validate(self, value, model_instance):
if self.rel.parent_link:
return
super(ForeignKey, self).validate(value, model_instance)
if value is None:
return
using = router.db_for_read(model_instance.__class__, instance=model_instance)
qs = self.rel.to._default_manager.using(using).filter(
**{self.rel.field_name: value}
)
qs = qs.complex_filter(self.rel.limit_choices_to)
if not qs.exists():
raise exceptions.ValidationError(self.error_messages['invalid'] % {
'model': self.rel.to._meta.verbose_name, 'pk': value})
def get_attname(self):
return '%s_id' % self.name
def get_validator_unique_lookup_type(self):
return '%s__%s__exact' % (self.name, self.rel.get_related_field().name)
def get_default(self):
"Here we check if the default value is an object and return the to_field if so."
field_default = super(ForeignKey, self).get_default()
if isinstance(field_default, self.rel.to):
return getattr(field_default, self.rel.get_related_field().attname)
return field_default
def get_db_prep_save(self, value, connection):
        if value == '' or value is None:
return None
else:
return self.rel.get_related_field().get_db_prep_save(value,
connection=connection)
def value_to_string(self, obj):
if not obj:
# In required many-to-one fields with only one available choice,
# select that one available choice. Note: For SelectFields
# we have to check that the length of choices is *2*, not 1,
# because SelectFields always have an initial "blank" value.
if not self.blank and self.choices:
choice_list = self.get_choices_default()
if len(choice_list) == 2:
return smart_text(choice_list[1][0])
return Field.value_to_string(self, obj)
def contribute_to_class(self, cls, name):
super(ForeignKey, self).contribute_to_class(cls, name)
setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self))
if isinstance(self.rel.to, six.string_types):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "o2m")
def contribute_to_related_class(self, cls, related):
# Internal FK's - i.e., those with a related name ending with '+' -
# and swapped models don't get a related descriptor.
if not self.rel.is_hidden() and not related.model._meta.swapped:
setattr(cls, related.get_accessor_name(), ForeignRelatedObjectsDescriptor(related))
if self.rel.limit_choices_to:
cls._meta.related_fkey_lookups.append(self.rel.limit_choices_to)
if self.rel.field_name is None:
self.rel.field_name = cls._meta.pk.name
def formfield(self, **kwargs):
db = kwargs.pop('using', None)
if isinstance(self.rel.to, six.string_types):
raise ValueError("Cannot create form field for %r yet, because "
"its related model %r has not been loaded yet" %
(self.name, self.rel.to))
defaults = {
'form_class': forms.ModelChoiceField,
'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to),
'to_field_name': self.rel.field_name,
}
defaults.update(kwargs)
return super(ForeignKey, self).formfield(**defaults)
def db_type(self, connection):
# The database column type of a ForeignKey is the column type
# of the field to which it points. An exception is if the ForeignKey
# points to an AutoField/PositiveIntegerField/PositiveSmallIntegerField,
# in which case the column type is simply that of an IntegerField.
# If the database needs similar types for key fields however, the only
        # thing we can do is make AutoField an IntegerField.
rel_field = self.rel.get_related_field()
if (isinstance(rel_field, AutoField) or
(not connection.features.related_fields_match_type and
isinstance(rel_field, (PositiveIntegerField,
PositiveSmallIntegerField)))):
return IntegerField().db_type(connection=connection)
return rel_field.db_type(connection=connection)
class OneToOneField(ForeignKey):
"""
A OneToOneField is essentially the same as a ForeignKey, with the exception
    that it always carries a "unique" constraint with it and the reverse relation
always returns the object pointed to (since there will only ever be one),
rather than returning a list.
"""
description = _("One-to-one relationship")
def __init__(self, to, to_field=None, **kwargs):
kwargs['unique'] = True
super(OneToOneField, self).__init__(to, to_field, OneToOneRel, **kwargs)
def contribute_to_related_class(self, cls, related):
setattr(cls, related.get_accessor_name(),
SingleRelatedObjectDescriptor(related))
def formfield(self, **kwargs):
if self.rel.parent_link:
return None
return super(OneToOneField, self).formfield(**kwargs)
def save_form_data(self, instance, data):
if isinstance(data, self.rel.to):
setattr(instance, self.name, data)
else:
setattr(instance, self.attname, data)
def create_many_to_many_intermediary_model(field, klass):
from django.db import models
managed = True
if isinstance(field.rel.to, six.string_types) and field.rel.to != RECURSIVE_RELATIONSHIP_CONSTANT:
to_model = field.rel.to
to = to_model.split('.')[-1]
def set_managed(field, model, cls):
field.rel.through._meta.managed = model._meta.managed or cls._meta.managed
add_lazy_relation(klass, field, to_model, set_managed)
elif isinstance(field.rel.to, six.string_types):
to = klass._meta.object_name
to_model = klass
managed = klass._meta.managed
else:
to = field.rel.to._meta.object_name
to_model = field.rel.to
managed = klass._meta.managed or to_model._meta.managed
name = '%s_%s' % (klass._meta.object_name, field.name)
if field.rel.to == RECURSIVE_RELATIONSHIP_CONSTANT or to == klass._meta.object_name:
from_ = 'from_%s' % to.lower()
to = 'to_%s' % to.lower()
else:
from_ = klass._meta.object_name.lower()
to = to.lower()
meta = type('Meta', (object,), {
'db_table': field._get_m2m_db_table(klass._meta),
'managed': managed,
'auto_created': klass,
'app_label': klass._meta.app_label,
'db_tablespace': klass._meta.db_tablespace,
'unique_together': (from_, to),
'verbose_name': '%(from)s-%(to)s relationship' % {'from': from_, 'to': to},
'verbose_name_plural': '%(from)s-%(to)s relationships' % {'from': from_, 'to': to},
})
# Construct and return the new class.
return type(name, (models.Model,), {
'Meta': meta,
'__module__': klass.__module__,
from_: models.ForeignKey(klass, related_name='%s+' % name, db_tablespace=field.db_tablespace),
to: models.ForeignKey(to_model, related_name='%s+' % name, db_tablespace=field.db_tablespace)
})
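# Sketch of the class this factory builds for a hypothetical
# "publications = ManyToManyField(Publication)" on an Article model (names
# and db_table are illustrative, derived from the rules above):
#
#     class Article_publications(models.Model):
#         article = models.ForeignKey(Article,
#                                     related_name='Article_publications+')
#         publication = models.ForeignKey(Publication,
#                                         related_name='Article_publications+')
#         class Meta:
#             db_table = 'app_article_publications'
#             unique_together = ('article', 'publication')
#             auto_created = Article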
class ManyToManyField(RelatedField, Field):
description = _("Many-to-many relationship")
def __init__(self, to, **kwargs):
try:
assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter to ManyToManyField must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
# Python 2.6 and earlier require dictionary keys to be of str type,
# not unicode and class names must be ASCII (in Python 2.x), so we
# forcibly coerce it here (breaks early if there's a problem).
to = str(to)
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
kwargs['rel'] = ManyToManyRel(to,
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
symmetrical=kwargs.pop('symmetrical', to == RECURSIVE_RELATIONSHIP_CONSTANT),
through=kwargs.pop('through', None))
self.db_table = kwargs.pop('db_table', None)
if kwargs['rel'].through is not None:
assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."
Field.__init__(self, **kwargs)
msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.')
self.help_text = string_concat(self.help_text, ' ', msg)
def get_choices_default(self):
return Field.get_choices(self, include_blank=False)
def _get_m2m_db_table(self, opts):
"Function that can be curried to provide the m2m table name for this relation"
if self.rel.through is not None:
return self.rel.through._meta.db_table
elif self.db_table:
return self.db_table
else:
return util.truncate_name('%s_%s' % (opts.db_table, self.name),
connection.ops.max_name_length())
def _get_m2m_attr(self, related, attr):
"Function that can be curried to provide the source accessor or DB column name for the m2m table"
cache_attr = '_m2m_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
for f in self.rel.through._meta.fields:
if hasattr(f, 'rel') and f.rel and f.rel.to == related.model:
setattr(self, cache_attr, getattr(f, attr))
return getattr(self, cache_attr)
def _get_m2m_reverse_attr(self, related, attr):
"Function that can be curried to provide the related accessor or DB column name for the m2m table"
cache_attr = '_m2m_reverse_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
found = False
for f in self.rel.through._meta.fields:
if hasattr(f, 'rel') and f.rel and f.rel.to == related.parent_model:
if related.model == related.parent_model:
# If this is an m2m-intermediate to self,
# the first foreign key you find will be
# the source column. Keep searching for
# the second foreign key.
if found:
setattr(self, cache_attr, getattr(f, attr))
break
else:
found = True
else:
setattr(self, cache_attr, getattr(f, attr))
break
return getattr(self, cache_attr)
def value_to_string(self, obj):
data = ''
if obj:
qs = getattr(obj, self.name).all()
data = [instance._get_pk_val() for instance in qs]
else:
# In required many-to-many fields with only one available choice,
# select that one available choice.
if not self.blank:
choices_list = self.get_choices_default()
if len(choices_list) == 1:
data = [choices_list[0][0]]
return smart_text(data)
def contribute_to_class(self, cls, name):
# To support multiple relations to self, it's useful to have a non-None
# related name on symmetrical relations for internal reasons. The
# concept doesn't make a lot of sense externally ("you want me to
# specify *what* on my non-reversible relation?!"), so we set it up
# automatically. The funky name reduces the chance of an accidental
# clash.
if self.rel.symmetrical and (self.rel.to == "self" or self.rel.to == cls._meta.object_name):
self.rel.related_name = "%s_rel_+" % name
super(ManyToManyField, self).contribute_to_class(cls, name)
# The intermediate m2m model is not auto created if:
        # 1) There is a manually specified intermediate, or
        # 2) The class owning the m2m field is abstract, or
        # 3) The class owning the m2m field has been swapped out.
if not self.rel.through and not cls._meta.abstract and not cls._meta.swapped:
self.rel.through = create_many_to_many_intermediary_model(self, cls)
# Add the descriptor for the m2m relation
setattr(cls, self.name, ReverseManyRelatedObjectsDescriptor(self))
# Set up the accessor for the m2m table name for the relation
self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta)
# Populate some necessary rel arguments so that cross-app relations
# work correctly.
if isinstance(self.rel.through, six.string_types):
def resolve_through_model(field, model, cls):
field.rel.through = model
add_lazy_relation(cls, self, self.rel.through, resolve_through_model)
if isinstance(self.rel.to, six.string_types):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "m2m")
def contribute_to_related_class(self, cls, related):
# Internal M2Ms (i.e., those with a related name ending with '+')
# and swapped models don't get a related descriptor.
if not self.rel.is_hidden() and not related.model._meta.swapped:
setattr(cls, related.get_accessor_name(), ManyRelatedObjectsDescriptor(related))
# Set up the accessors for the column names on the m2m table
self.m2m_column_name = curry(self._get_m2m_attr, related, 'column')
self.m2m_reverse_name = curry(self._get_m2m_reverse_attr, related, 'column')
self.m2m_field_name = curry(self._get_m2m_attr, related, 'name')
self.m2m_reverse_field_name = curry(self._get_m2m_reverse_attr, related, 'name')
get_m2m_rel = curry(self._get_m2m_attr, related, 'rel')
self.m2m_target_field_name = lambda: get_m2m_rel().field_name
get_m2m_reverse_rel = curry(self._get_m2m_reverse_attr, related, 'rel')
self.m2m_reverse_target_field_name = lambda: get_m2m_reverse_rel().field_name
def set_attributes_from_rel(self):
pass
def value_from_object(self, obj):
"Returns the value of this field in the given model instance."
return getattr(obj, self.attname).all()
def save_form_data(self, instance, data):
setattr(instance, self.attname, data)
def formfield(self, **kwargs):
db = kwargs.pop('using', None)
defaults = {
'form_class': forms.ModelMultipleChoiceField,
'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to)
}
defaults.update(kwargs)
# If initial is passed in, it's a list of related objects, but the
# MultipleChoiceField takes a list of IDs.
if defaults.get('initial') is not None:
initial = defaults['initial']
if callable(initial):
initial = initial()
defaults['initial'] = [i._get_pk_val() for i in initial]
return super(ManyToManyField, self).formfield(**defaults)
def db_type(self, connection):
# A ManyToManyField is not represented by a single column,
# so return None.
return None
| bsd-3-clause | 6,839,620,686,847,126,000 | 1,614,546,491,906,298,600 | 45.934947 | 228 | 0.600019 | false |
SuriyaaKudoIsc/olympia | apps/api/tests/test_oauth.py | 5 | 34999 | """
Verifies basic OAUTH functionality in AMO.
Sample request_token query:
/en-US/firefox/oauth/request_token/?
oauth_consumer_key=GYKEp7m5fJpj9j8Vjz&
oauth_nonce=A7A79B47-B571-4D70-AA6C-592A0555E94B&
oauth_signature_method=HMAC-SHA1&
oauth_timestamp=1282950712&
oauth_version=1.0
With headers:
Authorization: OAuth realm="",
oauth_consumer_key="GYKEp7m5fJpj9j8Vjz",
oauth_signature_method="HMAC-SHA1",
oauth_signature="JBCA4ah%2FOQC0lLWV8aChGAC+15s%3D",
oauth_timestamp="1282950995",
oauth_nonce="1008F707-37E6-4ABF-8322-C6B658771D88",
oauth_version="1.0"
"""
import json
import os
import time
import urllib
import urlparse
from django import forms
from django.conf import settings
from django.core import mail
from django.test.client import (encode_multipart, Client, FakePayload,
BOUNDARY, MULTIPART_CONTENT, RequestFactory)
import oauth2 as oauth
from mock import Mock, patch
from nose.tools import eq_
from piston.models import Consumer
import amo
from amo.helpers import absolutify
from amo.tests import TestCase
from amo.urlresolvers import reverse
from api.authentication import AMOOAuthAuthentication
from addons.models import Addon, AddonUser, BlacklistedGuid
from devhub.models import ActivityLog, SubmitStep
from files.models import File
from perf.models import (Performance, PerformanceAppVersions,
PerformanceOSVersion)
from translations.models import Translation
from users.models import UserProfile
from versions.models import AppVersion, Version
def _get_args(consumer, token=None, callback=False, verifier=None):
d = dict(oauth_consumer_key=consumer.key,
oauth_nonce=oauth.generate_nonce(),
oauth_signature_method='HMAC-SHA1',
oauth_timestamp=int(time.time()),
oauth_version='1.0')
if callback:
d['oauth_callback'] = 'http://testserver/foo'
if verifier:
d['oauth_verifier'] = verifier
return d
def get_absolute_url(url):
if isinstance(url, tuple):
url = reverse(url[0], args=url[1:])
else:
url = reverse(url)
return 'http://%s%s' % ('api', url)
def data_keys(d):
# Form keys and values MUST be part of the signature.
# File keys MUST be part of the signature.
# But file values MUST NOT be included as part of the signature.
return dict([k, '' if isinstance(v, file) else v] for k, v in d.items())
class OAuthClient(Client):
"""OauthClient can make magically signed requests."""
signature_method = oauth.SignatureMethod_HMAC_SHA1()
def get(self, url, consumer=None, token=None, callback=False,
verifier=None, params=None):
url = get_absolute_url(url)
if params:
url = '%s?%s' % (url, urllib.urlencode(params))
req = oauth.Request(method='GET', url=url,
parameters=_get_args(consumer, callback=callback,
verifier=verifier))
req.sign_request(self.signature_method, consumer, token)
return super(OAuthClient, self).get(
req.to_url(), HTTP_HOST='api', HTTP_AUTHORIZATION='OAuth realm=""',
**req)
def delete(self, url, consumer=None, token=None, callback=False,
verifier=None):
url = get_absolute_url(url)
req = oauth.Request(method='DELETE', url=url,
parameters=_get_args(consumer, callback=callback,
verifier=verifier))
req.sign_request(self.signature_method, consumer, token)
return super(OAuthClient, self).delete(
req.to_url(), HTTP_HOST='api', HTTP_AUTHORIZATION='OAuth realm=""',
**req)
def post(self, url, consumer=None, token=None, callback=False,
verifier=None, data={}):
url = get_absolute_url(url)
params = _get_args(consumer, callback=callback, verifier=verifier)
params.update(data_keys(data))
req = oauth.Request(method='POST', url=url, parameters=params)
req.sign_request(self.signature_method, consumer, token)
return super(OAuthClient, self).post(
req.to_url(), HTTP_HOST='api', HTTP_AUTHORIZATION='OAuth realm=""',
data=data, headers=req.to_header())
def put(self, url, consumer=None, token=None, callback=False,
verifier=None, data={}, content_type=MULTIPART_CONTENT, **kwargs):
"""
Send a resource to the server using PUT.
"""
# If data has come from JSON remove unicode keys.
data = dict([(str(k), v) for k, v in data.items()])
url = get_absolute_url(url)
params = _get_args(consumer, callback=callback, verifier=verifier)
params.update(data_keys(data))
req = oauth.Request(method='PUT', url=url, parameters=params)
req.sign_request(self.signature_method, consumer, token)
post_data = encode_multipart(BOUNDARY, data)
parsed = urlparse.urlparse(url)
query_string = urllib.urlencode(req, doseq=True)
r = {
'CONTENT_LENGTH': len(post_data),
'CONTENT_TYPE': content_type,
'PATH_INFO': urllib.unquote(parsed[2]),
'QUERY_STRING': query_string,
'REQUEST_METHOD': 'PUT',
'wsgi.input': FakePayload(post_data),
'HTTP_HOST': 'api',
'HTTP_AUTHORIZATION': 'OAuth realm=""',
}
r.update(req)
response = self.request(**r)
return response
oclient = OAuthClient()
token_keys = ('oauth_token_secret', 'oauth_token',)
def get_token_from_response(response):
data = urlparse.parse_qs(response.content)
for key in token_keys:
assert key in data.keys(), '%s not in %s' % (key, data.keys())
return oauth.Token(key=data['oauth_token'][0],
secret=data['oauth_token_secret'][0])
def get_request_token(consumer, callback=False):
r = oclient.get('oauth.request_token', consumer, callback=callback)
return get_token_from_response(r)
def get_access_token(consumer, token, authorize=True, verifier=None):
r = oclient.get('oauth.access_token', consumer, token, verifier=verifier)
if authorize:
return get_token_from_response(r)
else:
eq_(r.status_code, 401)
class BaseOAuth(TestCase):
fixtures = ['base/users', 'base/appversion', 'base/licenses']
def setUp(self):
super(BaseOAuth, self).setUp()
self.editor = UserProfile.objects.get(email='editor@mozilla.com')
self.admin = UserProfile.objects.get(email='admin@mozilla.com')
consumers = []
for status in ('accepted', 'pending', 'canceled', ):
c = Consumer(name='a', status=status, user=self.editor)
c.generate_random_codes()
c.save()
consumers.append(c)
self.accepted_consumer = consumers[0]
self.pending_consumer = consumers[1]
self.canceled_consumer = consumers[2]
self.token = None
class TestBaseOAuth(BaseOAuth):
def test_accepted(self):
self.assertRaises(AssertionError, get_request_token,
self.accepted_consumer)
def test_accepted_callback(self):
get_request_token(self.accepted_consumer, callback=True)
def test_request_token_pending(self):
get_request_token(self.pending_consumer, callback=True)
def test_request_token_cancelled(self):
get_request_token(self.canceled_consumer, callback=True)
def test_request_token_fake(self):
"""Try with a phony consumer key"""
c = Mock()
c.key = 'yer'
c.secret = 'mom'
r = oclient.get('oauth.request_token', c, callback=True)
eq_(r.content, 'Invalid Consumer.')
@patch('piston.authentication.oauth.OAuthAuthentication.is_authenticated')
def _test_auth(self, pk, is_authenticated, two_legged=True):
request = RequestFactory().get('/en-US/firefox/2/api/2/user/',
data={'authenticate_as': pk})
request.user = None
def alter_request(*args, **kw):
request.user = self.admin
return True
is_authenticated.return_value = True
is_authenticated.side_effect = alter_request
auth = AMOOAuthAuthentication()
auth.two_legged = two_legged
auth.is_authenticated(request)
return request
def test_login_nonexistant(self):
eq_(self.admin, self._test_auth(9999).user)
def test_login_deleted(self):
# If _test_auth returns self.admin, that means the user was
# not altered to the user set in authenticate_as.
self.editor.update(deleted=True)
pk = self.editor.pk
eq_(self.admin, self._test_auth(pk).user)
def test_login_unconfirmed(self):
self.editor.update(confirmationcode='something')
pk = self.editor.pk
eq_(self.admin, self._test_auth(pk).user)
def test_login_works(self):
pk = self.editor.pk
eq_(self.editor, self._test_auth(pk).user)
def test_login_three_legged(self):
pk = self.editor.pk
eq_(self.admin, self._test_auth(pk, two_legged=False).user)
class TestUser(BaseOAuth):
def test_user(self):
r = oclient.get('api.user', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['email'], 'editor@mozilla.com')
def test_user_lookup(self):
partner = UserProfile.objects.get(email='partner@mozilla.com')
c = Consumer(name='p', status='accepted', user=partner)
c.generate_random_codes()
c.save()
r = oclient.get('api.user', c, None,
params={'email': 'admin@mozilla.com'})
eq_(r.status_code, 200)
eq_(json.loads(r.content)['email'], 'admin@mozilla.com')
def test_failed_user_lookup(self):
partner = UserProfile.objects.get(email='partner@mozilla.com')
c = Consumer(name='p', status='accepted', user=partner)
c.generate_random_codes()
c.save()
r = oclient.get('api.user', c, None,
params={'email': 'not_a_user@mozilla.com'})
eq_(r.status_code, 404)
def test_forbidden_user_lookup(self, response_code=401):
r = oclient.get('api.user', self.accepted_consumer, self.token,
params={'email': 'admin@mozilla.com'})
eq_(r.status_code, response_code)
class TestDRFUser(TestUser):
def setUp(self):
super(TestDRFUser, self).setUp()
self.create_switch('drf', db=True)
def test_forbidden_user_lookup(self):
super(TestDRFUser, self).test_forbidden_user_lookup(response_code=403)
def activitylog_count(type=None):
qs = ActivityLog.objects
if type:
qs = qs.filter(action=type.id)
return qs.count()
class TestAddon(BaseOAuth):
created_http_status = 200
permission_denied_http_status = 401
def setUp(self):
super(TestAddon, self).setUp()
path = 'apps/files/fixtures/files/extension.xpi'
xpi = os.path.join(settings.ROOT, path)
f = open(xpi)
self.create_data = dict(builtin=0,
name='FREEDOM',
text='This is FREE!',
platform='mac',
xpi=f)
path = 'apps/files/fixtures/files/extension-0.2.xpi'
self.version_data = dict(builtin=2, platform='windows',
xpi=open(os.path.join(settings.ROOT, path)))
self.update_data = dict(name='fu',
default_locale='fr',
homepage='mozilla.com',
support_email='go@away.com',
support_url='http://google.com/',
description='awesome',
summary='sucks',
developer_comments='i made it for you',
eula='love it',
privacy_policy='aybabtu',
the_reason='for shits',
the_future='is gone',
view_source=1,
prerelease=1,
binary=False,
site_specific=1)
def make_create_request(self, data):
return oclient.post('api.addons', self.accepted_consumer, self.token,
data=data)
def create_addon(self):
current_count = activitylog_count(amo.LOG.CREATE_ADDON)
r = self.make_create_request(self.create_data)
eq_(r.status_code, self.created_http_status, r.content)
# 1 new add-on
eq_(activitylog_count(amo.LOG.CREATE_ADDON), current_count + 1)
return json.loads(r.content)
def test_create_no_user(self):
# The user in TwoLeggedAuth is set to the consumer user.
# If there isn't one, we should get a challenge back.
self.accepted_consumer.user = None
self.accepted_consumer.save()
r = self.make_create_request(self.create_data)
eq_(r.status_code, 401)
def test_create_user_altered(self):
data = self.create_data
data['authenticate_as'] = self.editor.pk
r = self.make_create_request(data)
eq_(r.status_code, self.created_http_status, r.content)
id = json.loads(r.content)['id']
ad = Addon.objects.get(pk=id)
eq_(len(ad.authors.all()), 1)
eq_(ad.authors.all()[0].pk, self.editor.pk)
def test_create(self):
        # License (req'd): MIT, GPLv2, GPLv3, LGPLv2.1, LGPLv3, BSD, Other
# Custom License (if other, req'd)
# XPI file... (req'd)
# Platform (All by default): 'mac', 'all', 'bsd', 'linux', 'solaris',
# 'windows'
data = self.create_addon()
id = data['id']
name = data['name']
eq_(name, 'xpi name')
assert Addon.objects.get(pk=id)
def create_no_license(self):
data = self.create_data.copy()
del data['builtin']
return self.make_create_request(data)
def test_create_no_license(self):
r = self.create_no_license()
eq_(r.status_code, self.created_http_status, r.content)
eq_(Addon.objects.count(), 1)
def test_create_no_license_step(self):
r = self.create_no_license()
eq_(r.status_code, self.created_http_status, r.content)
id = json.loads(r.content)['id']
eq_(SubmitStep.objects.get(addon=id).step, 5)
def test_create_no_license_url(self):
self.create_no_license()
self.client.login(username='editor@mozilla.com', password='password')
res = self.client.get(reverse('devhub.submit.resume',
args=['xpi-name']))
self.assertRedirects(res, reverse('devhub.submit.5',
args=['xpi-name']))
def test_create_no_license_status(self):
self.create_no_license()
eq_(Addon.objects.get(slug='xpi-name').status, 0)
def test_create_status(self):
r = self.make_create_request(self.create_data)
eq_(r.status_code, self.created_http_status, r.content)
eq_(Addon.objects.get(slug='xpi-name').status, 0)
eq_(Addon.objects.count(), 1)
def test_create_slug(self):
r = self.make_create_request(self.create_data)
content = json.loads(r.content)
eq_(content['slug'], 'xpi-name')
eq_(content['resource_uri'],
absolutify(reverse('addons.detail', args=['xpi-name'])))
def test_delete(self):
data = self.create_addon()
id = data['id']
guid = data['guid']
# Force it to be public so its guid gets blacklisted.
Addon.objects.filter(id=id).update(highest_status=amo.STATUS_PUBLIC)
r = oclient.delete(('api.addon', id), self.accepted_consumer,
self.token)
eq_(r.status_code, 204, r.content)
eq_(Addon.objects.filter(pk=id).count(), 0, "Didn't delete.")
assert BlacklistedGuid.objects.filter(guid=guid)
eq_(len(mail.outbox), 1)
def test_update(self):
# create an addon
data = self.create_addon()
id = data['id']
current_count = activitylog_count()
r = oclient.put(('api.addon', id), self.accepted_consumer, self.token,
data=self.update_data)
eq_(r.status_code, 200, r.content)
# EDIT_PROPERTIES
eq_(activitylog_count(), current_count + 1)
a = Addon.objects.get(pk=id)
for field, expected in self.update_data.iteritems():
value = getattr(a, field)
if isinstance(value, Translation):
value = unicode(value)
eq_(value, expected,
"'%s' didn't match: got '%s' instead of '%s'"
% (field, getattr(a, field), expected))
@patch('api.handlers.AddonForm.is_valid')
def test_update_fail(self, is_valid):
data = self.create_addon()
id = data['id']
is_valid.return_value = False
r = oclient.put(('api.addon', id), self.accepted_consumer, self.token,
data=self.update_data)
eq_(r.status_code, 400, r.content)
def test_update_nonexistant(self):
r = oclient.put(('api.addon', 0), self.accepted_consumer, self.token,
data={})
eq_(r.status_code, 410, r.content)
@patch('api.handlers.XPIForm.clean_xpi')
def test_xpi_failure(self, f):
f.side_effect = forms.ValidationError('F')
r = self.make_create_request(self.create_data)
eq_(r.status_code, 400)
def test_fake_license(self):
data = self.create_data.copy()
data['builtin'] = 'fff'
r = self.make_create_request(data)
eq_(r.status_code, 400, r.content)
eq_(r.content, 'Bad Request: Invalid data provided: '
'Select a valid choice. fff is not one of the available choices. '
'(builtin)')
@patch('zipfile.ZipFile.infolist')
def test_bad_zip(self, infolist):
fake = Mock()
fake.filename = '..'
infolist.return_value = [fake]
r = self.make_create_request(self.create_data)
eq_(r.status_code, 400, r.content)
@patch('versions.models.AppVersion.objects.get')
def test_bad_appversion(self, get):
get.side_effect = AppVersion.DoesNotExist()
data = self.create_addon()
assert data, "We didn't get data."
def test_wrong_guid(self):
data = self.create_addon()
id = data['id']
addon = Addon.objects.get(pk=id)
addon.guid = 'XXX'
addon.save()
# Upload new version of file
r = oclient.post(('api.versions', id,), self.accepted_consumer,
self.token, data=self.version_data)
eq_(r.status_code, 400)
eq_(r.content, 'Bad Request: Add-on did not validate: '
"UUID doesn't match add-on.")
def test_duplicate_guid(self):
self.create_addon()
data = self.create_data.copy()
data['xpi'] = self.version_data['xpi']
r = self.make_create_request(data)
eq_(r.status_code, 400)
eq_(r.content, 'Bad Request: Add-on did not validate: '
'Duplicate UUID found.')
def test_create_version(self):
# Create an addon and let's use this for the new version.
data = self.create_addon()
id = data['id']
log_count = activitylog_count()
# Upload new version of file
r = oclient.post(('api.versions', id,), self.accepted_consumer,
self.token, data=self.version_data)
eq_(r.status_code, 200, r.content)
# verify we've logged a new version and a new app version
eq_(log_count + 2, activitylog_count())
# validate that the addon has 2 versions
a = Addon.objects.get(pk=id)
eq_(a.versions.all().count(), 2)
# validate the version number
v = a.versions.get(version='0.2')
eq_(v.version, '0.2')
# validate any new version data
eq_(amo.PLATFORMS[v.files.get().platform].shortname, 'windows')
def test_create_version_bad_license(self):
data = self.create_addon()
id = data['id']
data = self.version_data.copy()
data['builtin'] = 'fu'
r = oclient.post(('api.versions', id,), self.accepted_consumer,
self.token, data=data)
eq_(r.status_code, 400, r.content)
def test_create_version_no_license(self):
data = self.create_addon()
id = data['id']
data = self.version_data.copy()
del data['builtin']
r = oclient.post(('api.versions', id,), self.accepted_consumer,
self.token, data=data)
eq_(r.status_code, 200, r.content)
data = json.loads(r.content)
id = data['id']
v = Version.objects.get(pk=id)
assert not v.license
def create_for_update(self):
data = self.create_addon()
id = data['id']
a = Addon.objects.get(pk=id)
v = a.versions.get()
eq_(v.version, '0.1')
return a, v, 'apps/files/fixtures/files/extension-0.2.xpi'
def test_update_version_no_license(self):
a, v, path = self.create_for_update()
data = dict(release_notes='fukyeah', platform='windows',
xpi=open(os.path.join(settings.ROOT, path)))
r = oclient.put(('api.version', a.id, v.id), self.accepted_consumer,
self.token, data=data, content_type=MULTIPART_CONTENT)
eq_(r.status_code, 200, r.content)
v = a.versions.get()
eq_(v.version, '0.2')
eq_(v.license, None)
def test_update_version_bad_license(self):
a, v, path = self.create_for_update()
data = dict(release_notes='fukyeah', builtin=3, platform='windows',
xpi=open(os.path.join(settings.ROOT, path)))
r = oclient.put(('api.version', a.id, v.id), self.accepted_consumer,
self.token, data=data, content_type=MULTIPART_CONTENT)
eq_(r.status_code, 400, r.content)
def test_update_version(self):
a, v, path = self.create_for_update()
data = dict(release_notes='fukyeah', builtin=2, platform='windows',
xpi=open(os.path.join(settings.ROOT, path)))
log_count = activitylog_count()
# upload new version
r = oclient.put(('api.version', a.id, v.id), self.accepted_consumer,
self.token, data=data, content_type=MULTIPART_CONTENT)
eq_(r.status_code, 200, r.content[:1000])
# verify we've logged a version update and a new app version
eq_(activitylog_count(), log_count + 2)
# verify data
v = a.versions.get()
eq_(v.version, '0.2')
eq_(str(v.releasenotes), 'fukyeah')
eq_(str(v.license.builtin), '2')
def test_update_version_bad_xpi(self):
data = self.create_addon()
id = data['id']
# verify version
a = Addon.objects.get(pk=id)
v = a.versions.get()
eq_(v.version, '0.1')
data = dict(release_notes='fukyeah', platform='windows')
# upload new version
r = oclient.put(('api.version', id, v.id), self.accepted_consumer,
self.token, data=data, content_type=MULTIPART_CONTENT)
eq_(r.status_code, 400)
def test_update_version_bad_id(self):
r = oclient.put(('api.version', 0, 0), self.accepted_consumer,
self.token, data={}, content_type=MULTIPART_CONTENT)
eq_(r.status_code, 410, r.content)
def test_get_version(self):
data = self.create_addon()
a = Addon.objects.get(pk=data['id'])
r = oclient.get(('api.version', data['id'], a.versions.get().id),
self.accepted_consumer, self.token)
eq_(r.status_code, 200)
def test_get_version_statuses(self):
data = self.create_addon()
a = Addon.objects.get(pk=data['id'])
r = oclient.get(('api.version', data['id'], a.versions.get().id),
self.accepted_consumer, self.token)
eq_(json.loads(r.content)['statuses'],
[[File.objects.all()[0].pk, 1]])
@patch('api.authorization.AllowRelatedAppOwner.has_object_permission')
@patch('api.authorization.AllowAppOwner.has_object_permission')
@patch('access.acl.action_allowed')
@patch('access.acl.check_addon_ownership')
def test_not_my_addon(self, addon_ownership, action_allowed,
app_owner, related_app_owner):
data = self.create_addon()
id = data['id']
a = Addon.objects.get(pk=id)
v = a.versions.get()
# The first one is for piston, the 3 next ones are for DRF.
addon_ownership.return_value = False
action_allowed.return_value = False
app_owner.return_value = False
related_app_owner.return_value = False
r = oclient.put(('api.version', id, v.id), self.accepted_consumer,
self.token, data={}, content_type=MULTIPART_CONTENT)
eq_(r.status_code, self.permission_denied_http_status, r.content)
r = oclient.put(('api.addon', id), self.accepted_consumer, self.token,
data=self.update_data)
eq_(r.status_code, self.permission_denied_http_status, r.content)
def test_delete_version(self):
data = self.create_addon()
id = data['id']
a = Addon.objects.get(pk=id)
v = a.versions.get()
log_count = activitylog_count()
r = oclient.delete(('api.version', id, v.id), self.accepted_consumer,
self.token)
eq_(activitylog_count(), log_count + 1)
eq_(r.status_code, 204, r.content)
eq_(a.versions.count(), 0)
def test_retrieve_versions(self):
data = self.create_addon()
id = data['id']
a = Addon.objects.get(pk=id)
v = a.versions.get()
r = oclient.get(('api.versions', id), self.accepted_consumer,
self.token)
eq_(r.status_code, 200, r.content)
data = json.loads(r.content)
for attr in ('id', 'version',):
expect = getattr(v, attr)
val = data[0].get(attr)
eq_(expect, val,
'Got "%s" was expecting "%s" for "%s".' % (val, expect, attr,))
def test_no_addons(self):
r = oclient.get('api.addons', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['count'], 0)
def test_no_user(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
AddonUser.objects.create(addon=addon, user=self.admin,
role=amo.AUTHOR_ROLE_DEV)
r = oclient.get('api.addons', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['count'], 0)
def test_my_addons_only(self):
for num in range(0, 2):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
AddonUser.objects.create(addon=addon, user=self.editor,
role=amo.AUTHOR_ROLE_DEV)
r = oclient.get('api.addons', self.accepted_consumer, self.token,
params={'authenticate_as': self.editor.pk})
j = json.loads(r.content)
eq_(j['count'], 1)
eq_(j['objects'][0]['id'], addon.id)
def test_one_addon(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
AddonUser.objects.create(addon=addon, user=self.editor,
role=amo.AUTHOR_ROLE_DEV)
r = oclient.get(('api.addon', addon.pk), self.accepted_consumer,
self.token, params={'authenticate_as': self.editor.pk})
eq_(json.loads(r.content)['id'], addon.pk)
def test_my_addons_role(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
AddonUser.objects.create(addon=addon, user=self.editor,
role=amo.AUTHOR_ROLE_VIEWER)
r = oclient.get('api.addons', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['count'], 0)
def test_my_addons_disabled(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION,
status=amo.STATUS_DISABLED)
AddonUser.objects.create(addon=addon, user=self.editor,
role=amo.AUTHOR_ROLE_DEV)
r = oclient.get('api.addons', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['count'], 0)
def test_my_addons_deleted(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION,
status=amo.STATUS_DELETED)
AddonUser.objects.create(addon=addon, user=self.editor,
role=amo.AUTHOR_ROLE_DEV)
r = oclient.get('api.addons', self.accepted_consumer, self.token)
eq_(json.loads(r.content)['count'], 0)
class TestDRFAddon(TestAddon):
created_http_status = 201
permission_denied_http_status = 403
def setUp(self):
super(TestDRFAddon, self).setUp()
self.create_switch('drf', db=True)
def _compare_dicts(self, drf_data, piston_data):
"""
Given 2 dicts of data from DRF and Piston, compare keys then values.
"""
eq_(sorted(drf_data.keys()), sorted(piston_data.keys()),
('Keys inexistent from Piston: {0}\n'
'Keys inexistent from DRF: {1}').format(
set(piston_data) - set(drf_data),
set(drf_data) - set(piston_data)))
for drf_item, piston_item in zip(sorted(drf_data.items()),
sorted(piston_data.items())):
eq_(drf_item[0], piston_item[0])
eq_(drf_item[1], piston_item[1],
('Different representations for key "{0}": DRF={1}, Piston={2}'
.format(drf_item[0], drf_item[1], piston_item[1])))
def compare_output(self, url, listed=False):
"""
Load responses from DRF and Piston given the `url` parameter and
compare returned data dicts, key by key. Useful to make sure
that both responses are similar.
Set `listed` to True for comparing responses as lists.
"""
r = oclient.get(url, self.accepted_consumer, self.token)
eq_(r.status_code, 200, r.content)
drf_data = json.loads(r.content)
self.create_switch('drf', **{'active': False})
r = oclient.get(url, self.accepted_consumer, self.token)
eq_(r.status_code, 200, r.content)
piston_data = json.loads(r.content)
if listed:
eq_(len(drf_data), len(piston_data))
for items in zip(drf_data, piston_data):
self._compare_dicts(items[0], items[1])
else:
self._compare_dicts(drf_data, piston_data)
def test_diff_versions(self):
data = self.create_addon()
self.compare_output(('api.versions', data['id']), listed=True)
def test_diff_version(self):
data = self.create_addon()
addon = Addon.objects.get(pk=data['id'])
version = addon.versions.get()
self.compare_output(('api.version', addon.id, version.id))
def test_diff_addons(self):
self.create_addon()
self.compare_output(('api.addons'))
def test_diff_addon(self):
data = self.create_addon()
self.compare_output(('api.addon', data['id']))
class TestPerformanceAPI(BaseOAuth):
fixtures = ['base/users']
def get_data(self):
return {
'os': 'WINNT',
'version': '123',
'platform': 'x86',
'product': 'firefox',
'product_version': 'x.y.z',
'average': '1.25',
'test': 'ts'
}
def make_create_request(self, data):
return oclient.post('api.performance.add', self.accepted_consumer,
self.token, data=data)
def test_form_fails(self):
res = self.make_create_request({})
eq_(res.status_code, 400)
def test_not_allowed(self):
res = self.client.post(reverse('api.performance.add'), {})
eq_(res.status_code, 401)
def test_form_incomplete(self):
data = self.get_data()
del data['test']
res = self.make_create_request(data)
eq_(res.status_code, 400)
assert 'This field is required. (test)' in res.content
def test_form_validate(self):
data = self.get_data()
data['os'] = 'WebOS hotness'
res = self.make_create_request(data)
eq_(res.status_code, 400)
assert 'WebOS hotness' in res.content
def test_no_addon(self):
data = self.get_data()
data['addon_id'] = '123'
res = self.make_create_request(data)
eq_(res.status_code, 400)
assert 'Add-on not found' in res.content
def test_addon(self):
data = self.get_data()
data['addon_id'] = Addon.objects.create(type=amo.ADDON_EXTENSION).pk
res = self.make_create_request(data)
eq_(res.status_code, 200)
perfs = Performance.objects.all()
eq_(perfs[0].addon_id, data['addon_id'])
def test_form_data(self):
res = self.make_create_request(self.get_data())
eq_(res.status_code, 200)
perfs = Performance.objects.all()
eq_(perfs.count(), 1)
eq_(perfs[0].average, 1.25)
def test_form_updates(self):
self.test_form_data()
data = self.get_data()
data['average'] = 1.3
self.make_create_request(data)
perfs = Performance.objects.all()
eq_(len(perfs), 1)
eq_(perfs[0].average, 1.3)
def test_creates_app_version(self):
self.test_form_data()
apps = PerformanceAppVersions.objects.all()
eq_(len(apps), 1)
eq_(apps[0].app, 'firefox')
eq_(apps[0].version, 'x.y.z')
def test_gets_app_version(self):
self.test_form_data()
eq_(PerformanceAppVersions.objects.all().count(), 1)
self.test_form_data()
eq_(PerformanceAppVersions.objects.all().count(), 1)
def test_creates_os_version(self):
self.test_form_data()
apps = PerformanceOSVersion.objects.all()
eq_(apps.count(), 1)
eq_(apps[0].os, 'WINNT')
def test_gets_os_version(self):
self.test_form_data()
eq_(PerformanceOSVersion.objects.all().count(), 1)
self.test_form_data()
eq_(PerformanceOSVersion.objects.all().count(), 1)
| bsd-3-clause | 21,487,230,726,686,892 | -833,297,473,087,752,600 | 36.153928 | 79 | 0.579159 | false |
jcoady9/python-for-android | python3-alpha/python3-src/Lib/test/test_profile.py | 91 | 7006 | """Test suite for the profile module."""
import sys
import pstats
import unittest
from difflib import unified_diff
from io import StringIO
from test.support import run_unittest
import profile
from test.profilee import testfunc, timer
class ProfileTest(unittest.TestCase):
profilerclass = profile.Profile
methodnames = ['print_stats', 'print_callers', 'print_callees']
expected_max_output = ':0(max)'
def get_expected_output(self):
return _ProfileOutput
@classmethod
def do_profiling(cls):
results = []
prof = cls.profilerclass(timer, 0.001)
start_timer = timer()
prof.runctx("testfunc()", globals(), locals())
results.append(timer() - start_timer)
for methodname in cls.methodnames:
s = StringIO()
stats = pstats.Stats(prof, stream=s)
stats.strip_dirs().sort_stats("stdname")
getattr(stats, methodname)()
output = s.getvalue().splitlines()
mod_name = testfunc.__module__.rsplit('.', 1)[1]
# Only compare against stats originating from the test file.
# Prevents outside code (e.g., the io module) from causing
# unexpected output.
output = [line.rstrip() for line in output if mod_name in line]
results.append('\n'.join(output))
return results
def test_cprofile(self):
results = self.do_profiling()
expected = self.get_expected_output()
self.assertEqual(results[0], 1000)
for i, method in enumerate(self.methodnames):
if results[i+1] != expected[method]:
print("Stats.%s output for %s doesn't fit expectation!" %
(method, self.profilerclass.__name__))
print('\n'.join(unified_diff(
results[i+1].split('\n'),
expected[method].split('\n'))))
def test_calling_conventions(self):
# Issue #5330: profile and cProfile wouldn't report C functions called
# with keyword arguments. We test all calling conventions.
stmts = [
"max([0])",
"max([0], key=int)",
"max([0], **dict(key=int))",
"max(*([0],))",
"max(*([0],), key=int)",
"max(*([0],), **dict(key=int))",
]
for stmt in stmts:
s = StringIO()
prof = self.profilerclass(timer, 0.001)
prof.runctx(stmt, globals(), locals())
stats = pstats.Stats(prof, stream=s)
stats.print_stats()
res = s.getvalue()
self.assertIn(self.expected_max_output, res,
"Profiling {0!r} didn't report max:\n{1}".format(stmt, res))
def regenerate_expected_output(filename, cls):
filename = filename.rstrip('co')
print('Regenerating %s...' % filename)
results = cls.do_profiling()
newfile = []
with open(filename, 'r') as f:
for line in f:
newfile.append(line)
if line.startswith('#--cut'):
break
with open(filename, 'w') as f:
f.writelines(newfile)
f.write("_ProfileOutput = {}\n")
for i, method in enumerate(cls.methodnames):
f.write('_ProfileOutput[%r] = """\\\n%s"""\n' % (
method, results[i+1]))
f.write('\nif __name__ == "__main__":\n main()\n')
def test_main():
run_unittest(ProfileTest)
def main():
if '-r' not in sys.argv:
test_main()
else:
regenerate_expected_output(__file__, ProfileTest)
# Don't remove this comment. Everything below it is auto-generated.
#--cut--------------------------------------------------------------------------
_ProfileOutput = {}
_ProfileOutput['print_stats'] = """\
28 27.972 0.999 27.972 0.999 profilee.py:110(__getattr__)
1 269.996 269.996 999.769 999.769 profilee.py:25(testfunc)
23/3 149.937 6.519 169.917 56.639 profilee.py:35(factorial)
20 19.980 0.999 19.980 0.999 profilee.py:48(mul)
2 39.986 19.993 599.830 299.915 profilee.py:55(helper)
4 115.984 28.996 119.964 29.991 profilee.py:73(helper1)
2 -0.006 -0.003 139.946 69.973 profilee.py:84(helper2_indirect)
8 311.976 38.997 399.912 49.989 profilee.py:88(helper2)
8 63.976 7.997 79.960 9.995 profilee.py:98(subhelper)"""
_ProfileOutput['print_callers'] = """\
:0(append) <- profilee.py:73(helper1)(4) 119.964
:0(exc_info) <- profilee.py:73(helper1)(4) 119.964
:0(hasattr) <- profilee.py:73(helper1)(4) 119.964
profilee.py:88(helper2)(8) 399.912
profilee.py:110(__getattr__) <- :0(hasattr)(12) 11.964
profilee.py:98(subhelper)(16) 79.960
profilee.py:25(testfunc) <- <string>:1(<module>)(1) 999.767
profilee.py:35(factorial) <- profilee.py:25(testfunc)(1) 999.769
profilee.py:35(factorial)(20) 169.917
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:48(mul) <- profilee.py:35(factorial)(20) 169.917
profilee.py:55(helper) <- profilee.py:25(testfunc)(2) 999.769
profilee.py:73(helper1) <- profilee.py:55(helper)(4) 599.830
profilee.py:84(helper2_indirect) <- profilee.py:55(helper)(2) 599.830
profilee.py:88(helper2) <- profilee.py:55(helper)(6) 599.830
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:98(subhelper) <- profilee.py:88(helper2)(8) 399.912"""
_ProfileOutput['print_callees'] = """\
:0(hasattr) -> profilee.py:110(__getattr__)(12) 27.972
<string>:1(<module>) -> profilee.py:25(testfunc)(1) 999.769
profilee.py:110(__getattr__) ->
profilee.py:25(testfunc) -> profilee.py:35(factorial)(1) 169.917
profilee.py:55(helper)(2) 599.830
profilee.py:35(factorial) -> profilee.py:35(factorial)(20) 169.917
profilee.py:48(mul)(20) 19.980
profilee.py:48(mul) ->
profilee.py:55(helper) -> profilee.py:73(helper1)(4) 119.964
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:88(helper2)(6) 399.912
profilee.py:73(helper1) -> :0(append)(4) -0.004
profilee.py:84(helper2_indirect) -> profilee.py:35(factorial)(2) 169.917
profilee.py:88(helper2)(2) 399.912
profilee.py:88(helper2) -> :0(hasattr)(8) 11.964
profilee.py:98(subhelper)(8) 79.960
profilee.py:98(subhelper) -> profilee.py:110(__getattr__)(16) 27.972"""
if __name__ == "__main__":
main()
| apache-2.0 | -3,631,582,127,036,499,500 | -847,815,776,528,524,900 | 42.7875 | 81 | 0.535826 | false |
ShinyROM/android_external_chromium_org | third_party/android_testrunner/errors.py | 171 | 1340 | #!/usr/bin/python2.4
#
#
# Copyright 2008, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines common exception classes for this package."""
class MsgException(Exception):
"""Generic exception with an optional string msg."""
def __init__(self, msg=""):
self.msg = msg
class WaitForResponseTimedOutError(Exception):
"""We sent a command and had to wait too long for response."""
class DeviceUnresponsiveError(Exception):
"""Device is unresponsive to command."""
class InstrumentationError(Exception):
"""Failed to run instrumentation."""
class AbortError(MsgException):
"""Generic exception that indicates a fatal error has occurred and program
execution should be aborted."""
class ParseError(MsgException):
"""Raised when xml data to parse has unrecognized format."""
| bsd-3-clause | 7,670,005,740,539,253,000 | -2,465,741,873,444,287,500 | 28.130435 | 76 | 0.744776 | false |
40223243/2015cd_midterm2 | 2015cd_midterm-master/static/Brython3.1.1-20150328-091302/Lib/csv.py | 637 | 16166 |
"""
csv.py - read/write/investigate CSV files
"""
import re
from _csv import Error, __version__, writer, reader, register_dialect, \
unregister_dialect, get_dialect, list_dialects, \
field_size_limit, \
QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE, \
__doc__
from _csv import Dialect as _Dialect
from io import StringIO
__all__ = [ "QUOTE_MINIMAL", "QUOTE_ALL", "QUOTE_NONNUMERIC", "QUOTE_NONE",
"Error", "Dialect", "__doc__", "excel", "excel_tab",
"field_size_limit", "reader", "writer",
"register_dialect", "get_dialect", "list_dialects", "Sniffer",
"unregister_dialect", "__version__", "DictReader", "DictWriter" ]
class Dialect:
"""Describe a CSV dialect.
This must be subclassed (see csv.excel). Valid attributes are:
delimiter, quotechar, escapechar, doublequote, skipinitialspace,
lineterminator, quoting.
"""
_name = ""
_valid = False
# placeholders
delimiter = None
quotechar = None
escapechar = None
doublequote = None
skipinitialspace = None
lineterminator = None
quoting = None
def __init__(self):
if self.__class__ != Dialect:
self._valid = True
self._validate()
def _validate(self):
try:
_Dialect(self)
except TypeError as e:
# We do this for compatibility with py2.3
raise Error(str(e))
class excel(Dialect):
"""Describe the usual properties of Excel-generated CSV files."""
delimiter = ','
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\r\n'
quoting = QUOTE_MINIMAL
register_dialect("excel", excel)
class excel_tab(excel):
"""Describe the usual properties of Excel-generated TAB-delimited files."""
delimiter = '\t'
register_dialect("excel-tab", excel_tab)
class unix_dialect(Dialect):
"""Describe the usual properties of Unix-generated CSV files."""
delimiter = ','
quotechar = '"'
doublequote = True
skipinitialspace = False
lineterminator = '\n'
quoting = QUOTE_ALL
register_dialect("unix", unix_dialect)
class DictReader:
def __init__(self, f, fieldnames=None, restkey=None, restval=None,
dialect="excel", *args, **kwds):
self._fieldnames = fieldnames # list of keys for the dict
self.restkey = restkey # key to catch long rows
self.restval = restval # default value for short rows
self.reader = reader(f, dialect, *args, **kwds)
self.dialect = dialect
self.line_num = 0
def __iter__(self):
return self
@property
def fieldnames(self):
if self._fieldnames is None:
try:
self._fieldnames = next(self.reader)
except StopIteration:
pass
self.line_num = self.reader.line_num
return self._fieldnames
@fieldnames.setter
def fieldnames(self, value):
self._fieldnames = value
def __next__(self):
if self.line_num == 0:
# Used only for its side effect.
self.fieldnames
row = next(self.reader)
self.line_num = self.reader.line_num
# unlike the basic reader, we prefer not to return blanks,
# because we will typically wind up with a dict full of None
# values
while row == []:
row = next(self.reader)
d = dict(zip(self.fieldnames, row))
lf = len(self.fieldnames)
lr = len(row)
if lf < lr:
d[self.restkey] = row[lf:]
elif lf > lr:
for key in self.fieldnames[lr:]:
d[key] = self.restval
return d
class DictWriter:
def __init__(self, f, fieldnames, restval="", extrasaction="raise",
dialect="excel", *args, **kwds):
self.fieldnames = fieldnames # list of keys for the dict
self.restval = restval # for writing short dicts
if extrasaction.lower() not in ("raise", "ignore"):
raise ValueError("extrasaction (%s) must be 'raise' or 'ignore'"
% extrasaction)
self.extrasaction = extrasaction
self.writer = writer(f, dialect, *args, **kwds)
def writeheader(self):
header = dict(zip(self.fieldnames, self.fieldnames))
self.writerow(header)
def _dict_to_list(self, rowdict):
if self.extrasaction == "raise":
wrong_fields = [k for k in rowdict if k not in self.fieldnames]
if wrong_fields:
raise ValueError("dict contains fields not in fieldnames: "
+ ", ".join(wrong_fields))
return [rowdict.get(key, self.restval) for key in self.fieldnames]
def writerow(self, rowdict):
return self.writer.writerow(self._dict_to_list(rowdict))
def writerows(self, rowdicts):
rows = []
for rowdict in rowdicts:
rows.append(self._dict_to_list(rowdict))
return self.writer.writerows(rows)
# Guard Sniffer's type checking against builds that exclude complex()
try:
complex
except NameError:
complex = float
class Sniffer:
'''
"Sniffs" the format of a CSV file (i.e. delimiter, quotechar)
Returns a Dialect object.
'''
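    # Typical use (sketch):
    #     with open('example.csv', newline='') as f:
    #         dialect = Sniffer().sniff(f.read(1024))
    #         f.seek(0)
    #         for row in reader(f, dialect):
    #             ...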
def __init__(self):
# in case there is more than one possible delimiter
self.preferred = [',', '\t', ';', ' ', ':']
def sniff(self, sample, delimiters=None):
"""
Returns a dialect (or None) corresponding to the sample
"""
quotechar, doublequote, delimiter, skipinitialspace = \
self._guess_quote_and_delimiter(sample, delimiters)
if not delimiter:
delimiter, skipinitialspace = self._guess_delimiter(sample,
delimiters)
if not delimiter:
raise Error("Could not determine delimiter")
class dialect(Dialect):
_name = "sniffed"
lineterminator = '\r\n'
quoting = QUOTE_MINIMAL
# escapechar = ''
dialect.doublequote = doublequote
dialect.delimiter = delimiter
# _csv.reader won't accept a quotechar of ''
dialect.quotechar = quotechar or '"'
dialect.skipinitialspace = skipinitialspace
return dialect
def _guess_quote_and_delimiter(self, data, delimiters):
"""
Looks for text enclosed between two identical quotes
(the probable quotechar) which are preceded and followed
by the same character (the probable delimiter).
For example:
,'some text',
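
        Here ',' would be counted as the probable delimiter and "'" as
        the probable quotechar.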
The quote with the most wins, same with the delimiter.
If there is no quotechar the delimiter can't be determined
this way.
"""
matches = []
for restr in ('(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?P=delim)', # ,".*?",
'(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?P<delim>[^\w\n"\'])(?P<space> ?)', # ".*?",
                      '(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?:$|\n)',   # ,".*?"
'(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?:$|\n)'): # ".*?" (no delim, no space)
regexp = re.compile(restr, re.DOTALL | re.MULTILINE)
matches = regexp.findall(data)
if matches:
break
if not matches:
# (quotechar, doublequote, delimiter, skipinitialspace)
return ('', False, None, 0)
quotes = {}
delims = {}
spaces = 0
for m in matches:
n = regexp.groupindex['quote'] - 1
key = m[n]
if key:
quotes[key] = quotes.get(key, 0) + 1
try:
n = regexp.groupindex['delim'] - 1
key = m[n]
except KeyError:
continue
if key and (delimiters is None or key in delimiters):
delims[key] = delims.get(key, 0) + 1
try:
n = regexp.groupindex['space'] - 1
except KeyError:
continue
if m[n]:
spaces += 1
quotechar = max(quotes, key=quotes.get)
if delims:
delim = max(delims, key=delims.get)
skipinitialspace = delims[delim] == spaces
if delim == '\n': # most likely a file with a single column
delim = ''
else:
# there is *no* delimiter, it's a single column of quoted data
delim = ''
skipinitialspace = 0
# if we see an extra quote between delimiters, we've got a
# double quoted format
dq_regexp = re.compile(
r"((%(delim)s)|^)\W*%(quote)s[^%(delim)s\n]*%(quote)s[^%(delim)s\n]*%(quote)s\W*((%(delim)s)|$)" % \
{'delim':re.escape(delim), 'quote':quotechar}, re.MULTILINE)
if dq_regexp.search(data):
doublequote = True
else:
doublequote = False
return (quotechar, doublequote, delim, skipinitialspace)
def _guess_delimiter(self, data, delimiters):
"""
The delimiter /should/ occur the same number of times on
each row. However, due to malformed data, it may not. We don't want
an all or nothing approach, so we allow for small variations in this
number.
1) build a table of the frequency of each character on every line.
2) build a table of frequencies of this frequency (meta-frequency?),
e.g. 'x occurred 5 times in 10 rows, 6 times in 1000 rows,
7 times in 2 rows'
3) use the mode of the meta-frequency to determine the /expected/
frequency for that character
4) find out how often the character actually meets that goal
5) the character that best meets its goal is the delimiter
For performance reasons, the data is evaluated in chunks, so it can
try and evaluate the smallest portion of the data possible, evaluating
additional chunks as necessary.
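
        As a worked example: if ',' occurs exactly 3 times on 95 of the
        first 100 rows, its mode is (3, 95), adjusted to (3, 90) after
        subtracting the 5 disagreeing rows; 90/100 meets the 0.9
        consistency threshold used below, so ',' becomes a candidate
        delimiter, while a character whose counts scatter across many
        different frequencies never accumulates such a mode.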
"""
data = list(filter(None, data.split('\n')))
ascii = [chr(c) for c in range(127)] # 7-bit ASCII
# build frequency tables
chunkLength = min(10, len(data))
iteration = 0
charFrequency = {}
modes = {}
delims = {}
start, end = 0, min(chunkLength, len(data))
while start < len(data):
iteration += 1
for line in data[start:end]:
for char in ascii:
metaFrequency = charFrequency.get(char, {})
# must count even if frequency is 0
freq = line.count(char)
# value is the mode
metaFrequency[freq] = metaFrequency.get(freq, 0) + 1
charFrequency[char] = metaFrequency
for char in charFrequency.keys():
items = list(charFrequency[char].items())
if len(items) == 1 and items[0][0] == 0:
continue
# get the mode of the frequencies
if len(items) > 1:
modes[char] = max(items, key=lambda x: x[1])
# adjust the mode - subtract the sum of all
# other frequencies
items.remove(modes[char])
modes[char] = (modes[char][0], modes[char][1]
- sum(item[1] for item in items))
else:
modes[char] = items[0]
# build a list of possible delimiters
modeList = modes.items()
total = float(chunkLength * iteration)
# (rows of consistent data) / (number of rows) = 100%
consistency = 1.0
# minimum consistency threshold
threshold = 0.9
while len(delims) == 0 and consistency >= threshold:
for k, v in modeList:
if v[0] > 0 and v[1] > 0:
if ((v[1]/total) >= consistency and
(delimiters is None or k in delimiters)):
delims[k] = v
consistency -= 0.01
if len(delims) == 1:
delim = list(delims.keys())[0]
skipinitialspace = (data[0].count(delim) ==
data[0].count("%c " % delim))
return (delim, skipinitialspace)
# analyze another chunkLength lines
start = end
end += chunkLength
if not delims:
return ('', 0)
# if there's more than one, fall back to a 'preferred' list
if len(delims) > 1:
for d in self.preferred:
if d in delims.keys():
skipinitialspace = (data[0].count(d) ==
data[0].count("%c " % d))
return (d, skipinitialspace)
# nothing else indicates a preference, pick the character that
# dominates(?)
items = [(v,k) for (k,v) in delims.items()]
items.sort()
delim = items[-1][1]
skipinitialspace = (data[0].count(delim) ==
data[0].count("%c " % delim))
return (delim, skipinitialspace)
def has_header(self, sample):
# Creates a dictionary of types of data in each column. If any
# column is of a single type (say, integers), *except* for the first
# row, then the first row is presumed to be labels. If the type
# can't be determined, it is assumed to be a string in which case
# the length of the string is the determining factor: if all of the
# rows except for the first are the same length, it's a header.
# Finally, a 'vote' is taken at the end for each column, adding or
# subtracting from the likelihood of the first row being a header.
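        # Worked example (illustrative): for a sample with header
        # ['name', 'age'] over rows like ('alice', '32'), the second
        # column is consistently int-castable but int('age') fails, so
        # it votes +1 for a header; a column of equal-length strings
        # whose header differs in length also votes +1.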
rdr = reader(StringIO(sample), self.sniff(sample))
header = next(rdr) # assume first row is header
columns = len(header)
columnTypes = {}
for i in range(columns): columnTypes[i] = None
checked = 0
for row in rdr:
# arbitrary number of rows to check, to keep it sane
if checked > 20:
break
checked += 1
if len(row) != columns:
continue # skip rows that have irregular number of columns
for col in list(columnTypes.keys()):
for thisType in [int, float, complex]:
try:
thisType(row[col])
break
except (ValueError, OverflowError):
pass
else:
# fallback to length of string
thisType = len(row[col])
if thisType != columnTypes[col]:
if columnTypes[col] is None: # add new column type
columnTypes[col] = thisType
else:
# type is inconsistent, remove column from
# consideration
del columnTypes[col]
# finally, compare results against first row and "vote"
# on whether it's a header
hasHeader = 0
for col, colType in columnTypes.items():
if type(colType) == type(0): # it's a length
if len(header[col]) != colType:
hasHeader += 1
else:
hasHeader -= 1
else: # attempt typecast
try:
colType(header[col])
except (ValueError, TypeError):
hasHeader += 1
else:
hasHeader -= 1
return hasHeader > 0
| agpl-3.0 | -457,111,160,485,985,200 | -815,582,947,793,244,500 | 35.004454 | 131 | 0.526413 | false |
lukemarsden/compose | tests/unit/config_test.py | 19 | 23623 | import mock
import os
import shutil
import tempfile
from .. import unittest
from compose import config
def make_service_dict(name, service_dict, working_dir):
"""
    Test helper function to construct a service dict via a ServiceLoader.
"""
return config.ServiceLoader(working_dir=working_dir).make_service_dict(name, service_dict)
class ConfigTest(unittest.TestCase):
def test_load(self):
service_dicts = config.load(
config.ConfigDetails(
{
'foo': {'image': 'busybox'},
'bar': {'environment': ['FOO=1']},
},
'working_dir',
'filename.yml'
)
)
self.assertEqual(
sorted(service_dicts, key=lambda d: d['name']),
sorted([
{
'name': 'bar',
'environment': {'FOO': '1'},
},
{
'name': 'foo',
'image': 'busybox',
}
])
)
def test_load_throws_error_when_not_dict(self):
with self.assertRaises(config.ConfigurationError):
config.load(
config.ConfigDetails(
{'web': 'busybox:latest'},
'working_dir',
'filename.yml'
)
)
def test_config_validation(self):
self.assertRaises(
config.ConfigurationError,
lambda: make_service_dict('foo', {'port': ['8000']}, 'tests/')
)
make_service_dict('foo', {'ports': ['8000']}, 'tests/')
class VolumePathTest(unittest.TestCase):
@mock.patch.dict(os.environ)
def test_volume_binding_with_environ(self):
os.environ['VOLUME_PATH'] = '/host/path'
d = make_service_dict('foo', {'volumes': ['${VOLUME_PATH}:/container/path']}, working_dir='.')
self.assertEqual(d['volumes'], ['/host/path:/container/path'])
@mock.patch.dict(os.environ)
def test_volume_binding_with_home(self):
os.environ['HOME'] = '/home/user'
d = make_service_dict('foo', {'volumes': ['~:/container/path']}, working_dir='.')
self.assertEqual(d['volumes'], ['/home/user:/container/path'])
class MergePathMappingTest(object):
def config_name(self):
return ""
def test_empty(self):
service_dict = config.merge_service_dicts({}, {})
self.assertNotIn(self.config_name(), service_dict)
def test_no_override(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/data']},
{},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/foo:/code', '/data']))
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{self.config_name(): ['/bar:/code']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code']))
def test_override_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/data']},
{self.config_name(): ['/bar:/code']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code', '/data']))
def test_add_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/data']},
{self.config_name(): ['/bar:/code', '/quux:/data']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code', '/quux:/data']))
def test_remove_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/quux:/data']},
{self.config_name(): ['/bar:/code', '/data']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code', '/data']))
class MergeVolumesTest(unittest.TestCase, MergePathMappingTest):
def config_name(self):
return 'volumes'
class MergeDevicesTest(unittest.TestCase, MergePathMappingTest):
def config_name(self):
return 'devices'
class BuildOrImageMergeTest(unittest.TestCase):
def test_merge_build_or_image_no_override(self):
self.assertEqual(
config.merge_service_dicts({'build': '.'}, {}),
{'build': '.'},
)
self.assertEqual(
config.merge_service_dicts({'image': 'redis'}, {}),
{'image': 'redis'},
)
def test_merge_build_or_image_override_with_same(self):
self.assertEqual(
config.merge_service_dicts({'build': '.'}, {'build': './web'}),
{'build': './web'},
)
self.assertEqual(
config.merge_service_dicts({'image': 'redis'}, {'image': 'postgres'}),
{'image': 'postgres'},
)
def test_merge_build_or_image_override_with_other(self):
self.assertEqual(
config.merge_service_dicts({'build': '.'}, {'image': 'redis'}),
{'image': 'redis'}
)
self.assertEqual(
config.merge_service_dicts({'image': 'redis'}, {'build': '.'}),
{'build': '.'}
)
class MergeListsTest(unittest.TestCase):
def test_empty(self):
service_dict = config.merge_service_dicts({}, {})
self.assertNotIn('ports', service_dict)
def test_no_override(self):
service_dict = config.merge_service_dicts(
{'ports': ['10:8000', '9000']},
{},
)
self.assertEqual(set(service_dict['ports']), set(['10:8000', '9000']))
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{'ports': ['10:8000', '9000']},
)
self.assertEqual(set(service_dict['ports']), set(['10:8000', '9000']))
def test_add_item(self):
service_dict = config.merge_service_dicts(
{'ports': ['10:8000', '9000']},
{'ports': ['20:8000']},
)
self.assertEqual(set(service_dict['ports']), set(['10:8000', '9000', '20:8000']))
class MergeStringsOrListsTest(unittest.TestCase):
def test_no_override(self):
service_dict = config.merge_service_dicts(
{'dns': '8.8.8.8'},
{},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8']))
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{'dns': '8.8.8.8'},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8']))
def test_add_string(self):
service_dict = config.merge_service_dicts(
{'dns': ['8.8.8.8']},
{'dns': '9.9.9.9'},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8', '9.9.9.9']))
def test_add_list(self):
service_dict = config.merge_service_dicts(
{'dns': '8.8.8.8'},
{'dns': ['9.9.9.9']},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8', '9.9.9.9']))
class MergeLabelsTest(unittest.TestCase):
def test_empty(self):
service_dict = config.merge_service_dicts({}, {})
self.assertNotIn('labels', service_dict)
def test_no_override(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': ''})
def test_no_base(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {}, 'tests/'),
make_service_dict('foo', {'labels': ['foo=2']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '2'})
def test_override_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {'labels': ['foo=2']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '2', 'bar': ''})
def test_add_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {'labels': ['bar=2']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': '2'})
def test_remove_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'labels': ['foo=1', 'bar=2']}, 'tests/'),
make_service_dict('foo', {'labels': ['bar']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': ''})
class MemoryOptionsTest(unittest.TestCase):
def test_validation_fails_with_just_memswap_limit(self):
"""
When you set a 'memswap_limit' it is invalid config unless you also set
a mem_limit
"""
with self.assertRaises(config.ConfigurationError):
make_service_dict(
'foo', {
'memswap_limit': 2000000,
},
'tests/'
)
def test_validation_with_correct_memswap_values(self):
service_dict = make_service_dict(
'foo', {
'mem_limit': 1000000,
'memswap_limit': 2000000,
},
'tests/'
)
self.assertEqual(service_dict['memswap_limit'], 2000000)
class EnvTest(unittest.TestCase):
def test_parse_environment_as_list(self):
environment = [
'NORMAL=F1',
'CONTAINS_EQUALS=F=2',
'TRAILING_EQUALS=',
]
self.assertEqual(
config.parse_environment(environment),
{'NORMAL': 'F1', 'CONTAINS_EQUALS': 'F=2', 'TRAILING_EQUALS': ''},
)
def test_parse_environment_as_dict(self):
environment = {
'NORMAL': 'F1',
'CONTAINS_EQUALS': 'F=2',
'TRAILING_EQUALS': None,
}
self.assertEqual(config.parse_environment(environment), environment)
def test_parse_environment_invalid(self):
with self.assertRaises(config.ConfigurationError):
config.parse_environment('a=b')
def test_parse_environment_empty(self):
self.assertEqual(config.parse_environment(None), {})
@mock.patch.dict(os.environ)
def test_resolve_environment(self):
os.environ['FILE_DEF'] = 'E1'
os.environ['FILE_DEF_EMPTY'] = 'E2'
os.environ['ENV_DEF'] = 'E3'
service_dict = make_service_dict(
'foo', {
'environment': {
'FILE_DEF': 'F1',
'FILE_DEF_EMPTY': '',
'ENV_DEF': None,
'NO_DEF': None
},
},
'tests/'
)
self.assertEqual(
service_dict['environment'],
{'FILE_DEF': 'F1', 'FILE_DEF_EMPTY': '', 'ENV_DEF': 'E3', 'NO_DEF': ''},
)
def test_env_from_file(self):
service_dict = make_service_dict(
'foo',
{'env_file': 'one.env'},
'tests/fixtures/env',
)
self.assertEqual(
service_dict['environment'],
{'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'bar'},
)
def test_env_from_multiple_files(self):
service_dict = make_service_dict(
'foo',
{'env_file': ['one.env', 'two.env']},
'tests/fixtures/env',
)
self.assertEqual(
service_dict['environment'],
{'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'baz', 'DOO': 'dah'},
)
def test_env_nonexistent_file(self):
options = {'env_file': 'nonexistent.env'}
self.assertRaises(
config.ConfigurationError,
lambda: make_service_dict('foo', options, 'tests/fixtures/env'),
)
@mock.patch.dict(os.environ)
def test_resolve_environment_from_file(self):
os.environ['FILE_DEF'] = 'E1'
os.environ['FILE_DEF_EMPTY'] = 'E2'
os.environ['ENV_DEF'] = 'E3'
service_dict = make_service_dict(
'foo',
{'env_file': 'resolve.env'},
'tests/fixtures/env',
)
self.assertEqual(
service_dict['environment'],
{'FILE_DEF': 'F1', 'FILE_DEF_EMPTY': '', 'ENV_DEF': 'E3', 'NO_DEF': ''},
)
@mock.patch.dict(os.environ)
def test_resolve_path(self):
os.environ['HOSTENV'] = '/tmp'
os.environ['CONTAINERENV'] = '/host/tmp'
service_dict = make_service_dict(
'foo',
{'volumes': ['$HOSTENV:$CONTAINERENV']},
working_dir="tests/fixtures/env"
)
self.assertEqual(set(service_dict['volumes']), set(['/tmp:/host/tmp']))
service_dict = make_service_dict(
'foo',
{'volumes': ['/opt${HOSTENV}:/opt${CONTAINERENV}']},
working_dir="tests/fixtures/env"
)
self.assertEqual(set(service_dict['volumes']), set(['/opt/tmp:/opt/host/tmp']))
def load_from_filename(filename):
return config.load(config.find('.', filename))
class ExtendsTest(unittest.TestCase):
def test_extends(self):
service_dicts = load_from_filename('tests/fixtures/extends/docker-compose.yml')
service_dicts = sorted(
service_dicts,
key=lambda sd: sd['name'],
)
self.assertEqual(service_dicts, [
{
'name': 'mydb',
'image': 'busybox',
'command': 'top',
},
{
'name': 'myweb',
'image': 'busybox',
'command': 'top',
'links': ['mydb:db'],
'environment': {
"FOO": "1",
"BAR": "2",
"BAZ": "2",
},
}
])
def test_nested(self):
service_dicts = load_from_filename('tests/fixtures/extends/nested.yml')
self.assertEqual(service_dicts, [
{
'name': 'myweb',
'image': 'busybox',
'command': '/bin/true',
'environment': {
"FOO": "2",
"BAR": "2",
},
},
])
def test_self_referencing_file(self):
"""
We specify a 'file' key that is the filename we're already in.
"""
service_dicts = load_from_filename('tests/fixtures/extends/specify-file-as-self.yml')
self.assertEqual(service_dicts, [
{
'environment':
{
'YEP': '1', 'BAR': '1', 'BAZ': '3'
},
'image': 'busybox',
'name': 'myweb'
},
{
'environment':
{'YEP': '1'},
'name': 'otherweb'
},
{
'environment':
{'YEP': '1', 'BAZ': '3'},
'image': 'busybox',
'name': 'web'
}
])
def test_circular(self):
try:
load_from_filename('tests/fixtures/extends/circle-1.yml')
raise Exception("Expected config.CircularReference to be raised")
except config.CircularReference as e:
self.assertEqual(
[(os.path.basename(filename), service_name) for (filename, service_name) in e.trail],
[
('circle-1.yml', 'web'),
('circle-2.yml', 'web'),
('circle-1.yml', 'web'),
],
)
def test_extends_validation_empty_dictionary(self):
dictionary = {'extends': None}
def load_config():
return make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
self.assertRaisesRegexp(config.ConfigurationError, 'dictionary', load_config)
dictionary['extends'] = {}
self.assertRaises(config.ConfigurationError, load_config)
def test_extends_validation_missing_service_key(self):
dictionary = {'extends': {'file': 'common.yml'}}
def load_config():
return make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
self.assertRaisesRegexp(config.ConfigurationError, 'service', load_config)
def test_extends_validation_invalid_key(self):
dictionary = {
'extends':
{
'service': 'web', 'file': 'common.yml', 'what': 'is this'
}
}
def load_config():
return make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
self.assertRaisesRegexp(config.ConfigurationError, 'what', load_config)
def test_extends_validation_no_file_key_no_filename_set(self):
dictionary = {'extends': {'service': 'web'}}
def load_config():
return make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
self.assertRaisesRegexp(config.ConfigurationError, 'file', load_config)
def test_extends_validation_valid_config(self):
dictionary = {'extends': {'service': 'web', 'file': 'common.yml'}}
def load_config():
return make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
self.assertIsInstance(load_config(), dict)
def test_extends_file_defaults_to_self(self):
"""
Test not specifying a file in our extends options that the
config is valid and correctly extends from itself.
"""
service_dicts = load_from_filename('tests/fixtures/extends/no-file-specified.yml')
self.assertEqual(service_dicts, [
{
'name': 'myweb',
'image': 'busybox',
'environment': {
"BAR": "1",
"BAZ": "3",
}
},
{
'name': 'web',
'image': 'busybox',
'environment': {
"BAZ": "3",
}
}
])
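    # 'links', 'volumes_from' and container-mode 'net' must not leak through
    # extends; 'net: host' is allowed (see the last case below).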
def test_blacklisted_options(self):
def load_config():
return make_service_dict('myweb', {
'extends': {
'file': 'whatever',
'service': 'web',
}
}, '.')
with self.assertRaisesRegexp(config.ConfigurationError, 'links'):
other_config = {'web': {'links': ['db']}}
with mock.patch.object(config, 'load_yaml', return_value=other_config):
                print(load_config())
with self.assertRaisesRegexp(config.ConfigurationError, 'volumes_from'):
other_config = {'web': {'volumes_from': ['db']}}
with mock.patch.object(config, 'load_yaml', return_value=other_config):
                print(load_config())
with self.assertRaisesRegexp(config.ConfigurationError, 'net'):
other_config = {'web': {'net': 'container:db'}}
with mock.patch.object(config, 'load_yaml', return_value=other_config):
                print(load_config())
        other_config = {'web': {'net': 'host'}}
        with mock.patch.object(config, 'load_yaml', return_value=other_config):
            print(load_config())
def test_volume_path(self):
dicts = load_from_filename('tests/fixtures/volume-path/docker-compose.yml')
paths = [
'%s:/foo' % os.path.abspath('tests/fixtures/volume-path/common/foo'),
'%s:/bar' % os.path.abspath('tests/fixtures/volume-path/bar'),
]
self.assertEqual(set(dicts[0]['volumes']), set(paths))
def test_parent_build_path_dne(self):
child = load_from_filename('tests/fixtures/extends/nonexistent-path-child.yml')
self.assertEqual(child, [
{
'name': 'dnechild',
'image': 'busybox',
'command': '/bin/true',
'environment': {
"FOO": "1",
"BAR": "2",
},
},
])
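# BuildPathTest checks that relative 'build' paths are resolved against the
# directory containing the config file.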
class BuildPathTest(unittest.TestCase):
def setUp(self):
self.abs_context_path = os.path.join(os.getcwd(), 'tests/fixtures/build-ctx')
def test_nonexistent_path(self):
with self.assertRaises(config.ConfigurationError):
config.load(
config.ConfigDetails(
{
'foo': {'build': 'nonexistent.path'},
},
'working_dir',
'filename.yml'
)
)
def test_relative_path(self):
relative_build_path = '../build-ctx/'
service_dict = make_service_dict(
'relpath',
{'build': relative_build_path},
working_dir='tests/fixtures/build-path'
)
        self.assertEqual(service_dict['build'], self.abs_context_path)
def test_absolute_path(self):
service_dict = make_service_dict(
'abspath',
{'build': self.abs_context_path},
working_dir='tests/fixtures/build-path'
)
        self.assertEqual(service_dict['build'], self.abs_context_path)
def test_from_file(self):
service_dict = load_from_filename('tests/fixtures/build-path/docker-compose.yml')
        self.assertEqual(service_dict, [{'name': 'foo', 'build': self.abs_context_path}])
class GetConfigPathTestCase(unittest.TestCase):
files = [
'docker-compose.yml',
'docker-compose.yaml',
'fig.yml',
'fig.yaml',
]
def test_get_config_path_default_file_in_basedir(self):
files = self.files
self.assertEqual('docker-compose.yml', get_config_filename_for_files(files[0:]))
self.assertEqual('docker-compose.yaml', get_config_filename_for_files(files[1:]))
self.assertEqual('fig.yml', get_config_filename_for_files(files[2:]))
self.assertEqual('fig.yaml', get_config_filename_for_files(files[3:]))
with self.assertRaises(config.ComposeFileNotFound):
get_config_filename_for_files([])
def test_get_config_path_default_file_in_parent_dir(self):
"""Test with files placed in the subdir"""
files = self.files
def get_config_in_subdir(files):
return get_config_filename_for_files(files, subdir=True)
self.assertEqual('docker-compose.yml', get_config_in_subdir(files[0:]))
self.assertEqual('docker-compose.yaml', get_config_in_subdir(files[1:]))
self.assertEqual('fig.yml', get_config_in_subdir(files[2:]))
self.assertEqual('fig.yaml', get_config_in_subdir(files[3:]))
with self.assertRaises(config.ComposeFileNotFound):
get_config_in_subdir([])
def get_config_filename_for_files(filenames, subdir=None):
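    """Create the given files in a temp project dir and return the basename
    that config.get_config_path() selects (optionally searching from a
    subdirectory)."""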
def make_files(dirname, filenames):
for fname in filenames:
with open(os.path.join(dirname, fname), 'w') as f:
f.write('')
project_dir = tempfile.mkdtemp()
try:
make_files(project_dir, filenames)
if subdir:
base_dir = tempfile.mkdtemp(dir=project_dir)
else:
base_dir = project_dir
return os.path.basename(config.get_config_path(base_dir))
finally:
shutil.rmtree(project_dir)
| apache-2.0 | 1,705,473,262,400,353,800 | -2,043,864,727,195,707,000 | 32.46034 | 102 | 0.521187 | false |
wkoathp/glance | glance/db/sqlalchemy/metadef_api/property.py | 6 | 6161 | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import exception as db_exc
from oslo_log import log as logging
from sqlalchemy import func
import sqlalchemy.orm as sa_orm
from glance.common import exception as exc
from glance.db.sqlalchemy.metadef_api import namespace as namespace_api
from glance.db.sqlalchemy.metadef_api import utils as metadef_utils
from glance.db.sqlalchemy import models_metadef as models
from glance import i18n
LOG = logging.getLogger(__name__)
_ = i18n._
_LW = i18n._LW
def _get(context, property_id, session):
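    """get a property by id; raise if the property is not found"""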
try:
query = session.query(models.MetadefProperty)\
.filter_by(id=property_id)
property_rec = query.one()
except sa_orm.exc.NoResultFound:
msg = (_("Metadata definition property not found for id=%s")
% property_id)
LOG.warn(msg)
raise exc.MetadefPropertyNotFound(msg)
return property_rec
def _get_by_name(context, namespace_name, name, session):
"""get a property; raise if ns not found/visible or property not found"""
namespace = namespace_api.get(context, namespace_name, session)
try:
query = session.query(models.MetadefProperty)\
.filter_by(name=name, namespace_id=namespace['id'])
property_rec = query.one()
except sa_orm.exc.NoResultFound:
msg = ("The metadata definition property with name=%(name)s"
" was not found in namespace=%(namespace_name)s."
% {'name': name, 'namespace_name': namespace_name})
LOG.debug(msg)
raise exc.MetadefPropertyNotFound(property_name=name,
namespace_name=namespace_name)
return property_rec
def get(context, namespace_name, name, session):
"""get a property; raise if ns not found/visible or property not found"""
property_rec = _get_by_name(context, namespace_name, name, session)
return property_rec.to_dict()
def get_all(context, namespace_name, session):
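    """get all properties in a namespace; raise if ns not found/visible"""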
namespace = namespace_api.get(context, namespace_name, session)
query = session.query(models.MetadefProperty)\
.filter_by(namespace_id=namespace['id'])
properties = query.all()
properties_list = []
for prop in properties:
properties_list.append(prop.to_dict())
return properties_list
def create(context, namespace_name, values, session):
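    """Create a property; raise if ns not found/visible or duplicate result"""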
namespace = namespace_api.get(context, namespace_name, session)
values.update({'namespace_id': namespace['id']})
property_rec = models.MetadefProperty()
metadef_utils.drop_protected_attrs(models.MetadefProperty, values)
property_rec.update(values.copy())
try:
property_rec.save(session=session)
except db_exc.DBDuplicateEntry:
msg = ("Can not create metadata definition property. A property"
" with name=%(name)s already exists in"
" namespace=%(namespace_name)s."
% {'name': property_rec.name,
'namespace_name': namespace_name})
LOG.debug(msg)
raise exc.MetadefDuplicateProperty(
property_name=property_rec.name,
namespace_name=namespace_name)
return property_rec.to_dict()
def update(context, namespace_name, property_id, values, session):
"""Update a property, raise if ns not found/visible or duplicate result"""
namespace_api.get(context, namespace_name, session)
property_rec = _get(context, property_id, session)
metadef_utils.drop_protected_attrs(models.MetadefProperty, values)
# values['updated_at'] = timeutils.utcnow() - done by TS mixin
try:
property_rec.update(values.copy())
property_rec.save(session=session)
except db_exc.DBDuplicateEntry:
msg = ("Invalid update. It would result in a duplicate"
" metadata definition property with the same name=%(name)s"
" in namespace=%(namespace_name)s."
% {'name': property_rec.name,
'namespace_name': namespace_name})
LOG.debug(msg)
emsg = (_("Invalid update. It would result in a duplicate"
" metadata definition property with the same name=%(name)s"
" in namespace=%(namespace_name)s.")
% {'name': property_rec.name,
'namespace_name': namespace_name})
raise exc.MetadefDuplicateProperty(emsg)
return property_rec.to_dict()
def delete(context, namespace_name, property_name, session):
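    """Delete a property; raise if ns not found/visible or property missing"""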
property_rec = _get_by_name(
context, namespace_name, property_name, session)
if property_rec:
session.delete(property_rec)
session.flush()
return property_rec.to_dict()
def delete_namespace_content(context, namespace_id, session):
"""Use this def only if the ns for the id has been verified as visible"""
    query = session.query(models.MetadefProperty)\
        .filter_by(namespace_id=namespace_id)
    count = query.delete(synchronize_session='fetch')
return count
def delete_by_namespace_name(context, namespace_name, session):
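    """Delete all properties in a namespace; raise if ns not found/visible"""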
namespace = namespace_api.get(context, namespace_name, session)
return delete_namespace_content(context, namespace['id'], session)
def count(context, namespace_name, session):
"""Get the count of properties for a namespace, raise if ns not found"""
namespace = namespace_api.get(context, namespace_name, session)
query = session.query(func.count(models.MetadefProperty.id))\
.filter_by(namespace_id=namespace['id'])
return query.scalar()
| apache-2.0 | 3,701,386,182,831,830,000 | -2,309,898,445,988,332,000 | 35.241176 | 78 | 0.668073 | false |