repo_name (string, 5-100 chars) | path (string, 4-299 chars) | copies (string, 990 classes) | size (string, 4-7 chars) | content (string, 666-1.03M chars) | license (string, 15 classes) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|
samueldotj/TeeRISC-Simulator | tests/quick/se/00.hello/test.py | 56 | 1816 | # Copyright (c) 2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
root.system.cpu[0].workload = LiveProcess(cmd = 'hello',
executable = binpath('hello'))
if root.system.cpu[0].checker != NULL:
root.system.cpu[0].checker.workload = root.system.cpu[0].workload
| bsd-3-clause | 1,922,590,762,508,636,400 | 55.75 | 72 | 0.768172 | false |
pokowaka/atreus-firmware | tmk/tmk_core/tool/mbed/mbed-sdk/workspace_tools/paths.py | 36 | 3374 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from os.path import join
from os import getenv
# Conventions about the directory structure
from settings import ROOT, BUILD_DIR
# Allow overriding some of the build parameters using environment variables
BUILD_DIR = getenv("MBED_BUILD_DIR") or BUILD_DIR
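# For example (illustrative only, not part of the original file), exporting
#   MBED_BUILD_DIR=/tmp/mbed-build
# in the shell before running the workspace tools would place all build
# output under /tmp/mbed-build instead of the default BUILD_DIR from settings.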
# Embedded Libraries Sources
LIB_DIR = join(ROOT, "libraries")
TOOLS = join(ROOT, "workspace_tools")
TOOLS_DATA = join(TOOLS, "data")
TOOLS_BOOTLOADERS = join(TOOLS, "bootloaders")
# mbed libraries
MBED_BASE = join(LIB_DIR, "mbed")
MBED_API = join(MBED_BASE, "api")
MBED_COMMON = join(MBED_BASE, "common")
MBED_HAL = join(MBED_BASE, "hal")
MBED_TARGETS_PATH = join(MBED_BASE, "targets")
MBED_LIBRARIES = join(BUILD_DIR, "mbed")
# Tests
TEST_DIR = join(LIB_DIR, "tests")
HOST_TESTS = join(ROOT, "workspace_tools", "host_tests")
# mbed RPC
MBED_RPC = join(LIB_DIR, "rpc")
# mbed RTOS
RTOS = join(LIB_DIR, "rtos")
MBED_RTX = join(RTOS, "rtx")
RTOS_ABSTRACTION = join(RTOS, "rtos")
RTOS_LIBRARIES = join(BUILD_DIR, "rtos")
# TCP/IP
NET = join(LIB_DIR, "net")
ETH_SOURCES = join(NET, "eth")
LWIP_SOURCES = join(NET, "lwip")
VODAFONE_SOURCES = join(NET, "VodafoneUSBModem")
CELLULAR_SOURCES = join(NET, "cellular", "CellularModem")
CELLULAR_USB_SOURCES = join(NET, "cellular", "CellularUSBModem")
UBLOX_SOURCES = join(NET, "cellular", "UbloxUSBModem")
NET_LIBRARIES = join(BUILD_DIR, "net")
ETH_LIBRARY = join(NET_LIBRARIES, "eth")
VODAFONE_LIBRARY = join(NET_LIBRARIES, "VodafoneUSBModem")
UBLOX_LIBRARY = join(NET_LIBRARIES, "UbloxUSBModem")
# FS
FS_PATH = join(LIB_DIR, "fs")
FAT_FS = join(FS_PATH, "fat")
SD_FS = join(FS_PATH, "sd")
FS_LIBRARY = join(BUILD_DIR, "fat")
# DSP
DSP = join(LIB_DIR, "dsp")
DSP_CMSIS = join(DSP, "cmsis_dsp")
DSP_ABSTRACTION = join(DSP, "dsp")
DSP_LIBRARIES = join(BUILD_DIR, "dsp")
# USB Device
USB = join(LIB_DIR, "USBDevice")
USB_LIBRARIES = join(BUILD_DIR, "usb")
# USB Host
USB_HOST = join(LIB_DIR, "USBHost")
USB_HOST_LIBRARIES = join(BUILD_DIR, "usb_host")
# Export
EXPORT_DIR = join(BUILD_DIR, "export")
EXPORT_WORKSPACE = join(EXPORT_DIR, "workspace")
EXPORT_TMP = join(EXPORT_DIR, ".temp")
# CppUtest library
CPPUTEST_DIR = join(ROOT, "..")
CPPUTEST_SRC = join(CPPUTEST_DIR, "cpputest", "src", "CppUTest")
CPPUTEST_INC = join(CPPUTEST_DIR, "cpputest", "include")
CPPUTEST_INC_EXT = join(CPPUTEST_DIR, "cpputest", "include", "CppUTest")
# Platform dependent code is here (for the armcc compiler)
CPPUTEST_PLATFORM_SRC = join(CPPUTEST_DIR, "cpputest", "src", "Platforms", "armcc")
CPPUTEST_PLATFORM_INC = join(CPPUTEST_DIR, "cpputest", "include", "Platforms", "armcc")
# Function 'main' used to run all compiled UTs
CPPUTEST_TESTRUNNER_SCR = join(TEST_DIR, "utest", "testrunner")
CPPUTEST_TESTRUNNER_INC = join(TEST_DIR, "utest", "testrunner")
CPPUTEST_LIBRARY = join(BUILD_DIR, "cpputest")
| gpl-3.0 | -1,424,437,508,543,351,800 | 29.954128 | 87 | 0.717546 | false |
Duraznos/root | interpreter/llvm/src/tools/clang/utils/ABITest/ABITestGen.py | 77 | 30083 | #!/usr/bin/env python
from pprint import pprint
import random, atexit, time
from random import randrange
import re
from Enumeration import *
from TypeGen import *
####
class TypePrinter:
def __init__(self, output, outputHeader=None,
outputTests=None, outputDriver=None,
headerName=None, info=None):
self.output = output
self.outputHeader = outputHeader
self.outputTests = outputTests
self.outputDriver = outputDriver
self.writeBody = outputHeader or outputTests or outputDriver
self.types = {}
self.testValues = {}
self.testReturnValues = {}
self.layoutTests = []
self.declarations = set()
if info:
for f in (self.output,self.outputHeader,self.outputTests,self.outputDriver):
if f:
print >>f,info
if self.writeBody:
print >>self.output, '#include <stdio.h>\n'
if self.outputTests:
print >>self.outputTests, '#include <stdio.h>'
print >>self.outputTests, '#include <string.h>'
print >>self.outputTests, '#include <assert.h>\n'
if headerName:
for f in (self.output,self.outputTests,self.outputDriver):
if f is not None:
print >>f, '#include "%s"\n'%(headerName,)
if self.outputDriver:
print >>self.outputDriver, '#include <stdio.h>'
print >>self.outputDriver, '#include <stdlib.h>\n'
print >>self.outputDriver, 'int main(int argc, char **argv) {'
print >>self.outputDriver, ' int index = -1;'
print >>self.outputDriver, ' if (argc > 1) index = atoi(argv[1]);'
def finish(self):
if self.layoutTests:
print >>self.output, 'int main(int argc, char **argv) {'
print >>self.output, ' int index = -1;'
print >>self.output, ' if (argc > 1) index = atoi(argv[1]);'
for i,f in self.layoutTests:
print >>self.output, ' if (index == -1 || index == %d)' % i
print >>self.output, ' %s();' % f
print >>self.output, ' return 0;'
print >>self.output, '}'
if self.outputDriver:
print >>self.outputDriver, ' printf("DONE\\n");'
print >>self.outputDriver, ' return 0;'
print >>self.outputDriver, '}'
def addDeclaration(self, decl):
if decl in self.declarations:
return False
self.declarations.add(decl)
if self.outputHeader:
print >>self.outputHeader, decl
else:
print >>self.output, decl
if self.outputTests:
print >>self.outputTests, decl
return True
def getTypeName(self, T):
name = self.types.get(T)
if name is None:
# Reserve slot
self.types[T] = None
self.types[T] = name = T.getTypeName(self)
return name
def writeLayoutTest(self, i, ty):
tyName = self.getTypeName(ty)
tyNameClean = tyName.replace(' ','_').replace('*','star')
fnName = 'test_%s' % tyNameClean
print >>self.output,'void %s(void) {' % fnName
self.printSizeOfType(' %s'%fnName, tyName, ty, self.output)
self.printAlignOfType(' %s'%fnName, tyName, ty, self.output)
self.printOffsetsOfType(' %s'%fnName, tyName, ty, self.output)
print >>self.output,'}'
print >>self.output
self.layoutTests.append((i,fnName))
def writeFunction(self, i, FT):
args = ', '.join(['%s arg%d'%(self.getTypeName(t),i) for i,t in enumerate(FT.argTypes)])
if not args:
args = 'void'
if FT.returnType is None:
retvalName = None
retvalTypeName = 'void'
else:
retvalTypeName = self.getTypeName(FT.returnType)
if self.writeBody or self.outputTests:
retvalName = self.getTestReturnValue(FT.returnType)
fnName = 'fn%d'%(FT.index,)
if self.outputHeader:
print >>self.outputHeader,'%s %s(%s);'%(retvalTypeName, fnName, args)
elif self.outputTests:
print >>self.outputTests,'%s %s(%s);'%(retvalTypeName, fnName, args)
print >>self.output,'%s %s(%s)'%(retvalTypeName, fnName, args),
if self.writeBody:
print >>self.output, '{'
for i,t in enumerate(FT.argTypes):
self.printValueOfType(' %s'%fnName, 'arg%d'%i, t)
if retvalName is not None:
print >>self.output, ' return %s;'%(retvalName,)
print >>self.output, '}'
else:
print >>self.output, '{}'
print >>self.output
if self.outputDriver:
print >>self.outputDriver, ' if (index == -1 || index == %d) {' % i
print >>self.outputDriver, ' extern void test_%s(void);' % fnName
print >>self.outputDriver, ' test_%s();' % fnName
print >>self.outputDriver, ' }'
if self.outputTests:
if self.outputHeader:
print >>self.outputHeader, 'void test_%s(void);'%(fnName,)
if retvalName is None:
retvalTests = None
else:
retvalTests = self.getTestValuesArray(FT.returnType)
tests = map(self.getTestValuesArray, FT.argTypes)
print >>self.outputTests, 'void test_%s(void) {'%(fnName,)
if retvalTests is not None:
print >>self.outputTests, ' printf("%s: testing return.\\n");'%(fnName,)
print >>self.outputTests, ' for (int i=0; i<%d; ++i) {'%(retvalTests[1],)
args = ', '.join(['%s[%d]'%(t,randrange(l)) for t,l in tests])
print >>self.outputTests, ' %s RV;'%(retvalTypeName,)
print >>self.outputTests, ' %s = %s[i];'%(retvalName, retvalTests[0])
print >>self.outputTests, ' RV = %s(%s);'%(fnName, args)
self.printValueOfType(' %s_RV'%fnName, 'RV', FT.returnType, output=self.outputTests, indent=4)
self.checkTypeValues('RV', '%s[i]' % retvalTests[0], FT.returnType, output=self.outputTests, indent=4)
print >>self.outputTests, ' }'
if tests:
print >>self.outputTests, ' printf("%s: testing arguments.\\n");'%(fnName,)
for i,(array,length) in enumerate(tests):
for j in range(length):
args = ['%s[%d]'%(t,randrange(l)) for t,l in tests]
args[i] = '%s[%d]'%(array,j)
print >>self.outputTests, ' %s(%s);'%(fnName, ', '.join(args),)
print >>self.outputTests, '}'
def getTestReturnValue(self, type):
typeName = self.getTypeName(type)
info = self.testReturnValues.get(typeName)
if info is None:
name = '%s_retval'%(typeName.replace(' ','_').replace('*','star'),)
print >>self.output, '%s %s;'%(typeName,name)
if self.outputHeader:
print >>self.outputHeader, 'extern %s %s;'%(typeName,name)
elif self.outputTests:
print >>self.outputTests, 'extern %s %s;'%(typeName,name)
info = self.testReturnValues[typeName] = name
return info
def getTestValuesArray(self, type):
typeName = self.getTypeName(type)
info = self.testValues.get(typeName)
if info is None:
name = '%s_values'%(typeName.replace(' ','_').replace('*','star'),)
print >>self.outputTests, 'static %s %s[] = {'%(typeName,name)
length = 0
for item in self.getTestValues(type):
print >>self.outputTests, '\t%s,'%(item,)
length += 1
print >>self.outputTests,'};'
info = self.testValues[typeName] = (name,length)
return info
def getTestValues(self, t):
if isinstance(t, BuiltinType):
if t.name=='float':
for i in ['0.0','-1.0','1.0']:
yield i+'f'
elif t.name=='double':
for i in ['0.0','-1.0','1.0']:
yield i
elif t.name in ('void *'):
yield '(void*) 0'
yield '(void*) -1'
else:
yield '(%s) 0'%(t.name,)
yield '(%s) -1'%(t.name,)
yield '(%s) 1'%(t.name,)
elif isinstance(t, EnumType):
for i in range(0, len(t.enumerators)):
yield 'enum%dval%d_%d' % (t.index, i, t.unique_id)
elif isinstance(t, RecordType):
nonPadding = [f for f in t.fields
if not f.isPaddingBitField()]
if not nonPadding:
yield '{ }'
return
# FIXME: Use designated initializers to access non-first
# fields of unions.
if t.isUnion:
for v in self.getTestValues(nonPadding[0]):
yield '{ %s }' % v
return
fieldValues = map(list, map(self.getTestValues, nonPadding))
for i,values in enumerate(fieldValues):
for v in values:
elements = map(random.choice,fieldValues)
elements[i] = v
yield '{ %s }'%(', '.join(elements))
elif isinstance(t, ComplexType):
for t in self.getTestValues(t.elementType):
yield '%s + %s * 1i'%(t,t)
elif isinstance(t, ArrayType):
values = list(self.getTestValues(t.elementType))
if not values:
yield '{ }'
for i in range(t.numElements):
for v in values:
elements = [random.choice(values) for i in range(t.numElements)]
elements[i] = v
yield '{ %s }'%(', '.join(elements))
else:
raise NotImplementedError,'Cannot make tests values of type: "%s"'%(t,)
def printSizeOfType(self, prefix, name, t, output=None, indent=2):
print >>output, '%*sprintf("%s: sizeof(%s) = %%ld\\n", (long)sizeof(%s));'%(indent, '', prefix, name, name)
def printAlignOfType(self, prefix, name, t, output=None, indent=2):
print >>output, '%*sprintf("%s: __alignof__(%s) = %%ld\\n", (long)__alignof__(%s));'%(indent, '', prefix, name, name)
def printOffsetsOfType(self, prefix, name, t, output=None, indent=2):
if isinstance(t, RecordType):
for i,f in enumerate(t.fields):
if f.isBitField():
continue
fname = 'field%d' % i
print >>output, '%*sprintf("%s: __builtin_offsetof(%s, %s) = %%ld\\n", (long)__builtin_offsetof(%s, %s));'%(indent, '', prefix, name, fname, name, fname)
def printValueOfType(self, prefix, name, t, output=None, indent=2):
if output is None:
output = self.output
if isinstance(t, BuiltinType):
value_expr = name
if t.name.split(' ')[-1] == '_Bool':
# Hack to work around PR5579.
value_expr = "%s ? 2 : 0" % name
if t.name.endswith('long long'):
code = 'lld'
elif t.name.endswith('long'):
code = 'ld'
elif t.name.split(' ')[-1] in ('_Bool','char','short',
'int','unsigned'):
code = 'd'
elif t.name in ('float','double'):
code = 'f'
elif t.name == 'long double':
code = 'Lf'
else:
code = 'p'
print >>output, '%*sprintf("%s: %s = %%%s\\n", %s);'%(
indent, '', prefix, name, code, value_expr)
elif isinstance(t, EnumType):
print >>output, '%*sprintf("%s: %s = %%d\\n", %s);'%(indent, '', prefix, name, name)
elif isinstance(t, RecordType):
if not t.fields:
print >>output, '%*sprintf("%s: %s (empty)\\n");'%(indent, '', prefix, name)
for i,f in enumerate(t.fields):
if f.isPaddingBitField():
continue
fname = '%s.field%d'%(name,i)
self.printValueOfType(prefix, fname, f, output=output, indent=indent)
elif isinstance(t, ComplexType):
self.printValueOfType(prefix, '(__real %s)'%name, t.elementType, output=output,indent=indent)
self.printValueOfType(prefix, '(__imag %s)'%name, t.elementType, output=output,indent=indent)
elif isinstance(t, ArrayType):
for i in range(t.numElements):
# Access in this fashion as a hackish way to portably
# access vectors.
if t.isVector:
self.printValueOfType(prefix, '((%s*) &%s)[%d]'%(t.elementType,name,i), t.elementType, output=output,indent=indent)
else:
self.printValueOfType(prefix, '%s[%d]'%(name,i), t.elementType, output=output,indent=indent)
else:
raise NotImplementedError,'Cannot print value of type: "%s"'%(t,)
def checkTypeValues(self, nameLHS, nameRHS, t, output=None, indent=2):
prefix = 'foo'
if output is None:
output = self.output
if isinstance(t, BuiltinType):
print >>output, '%*sassert(%s == %s);' % (indent, '', nameLHS, nameRHS)
elif isinstance(t, EnumType):
print >>output, '%*sassert(%s == %s);' % (indent, '', nameLHS, nameRHS)
elif isinstance(t, RecordType):
for i,f in enumerate(t.fields):
if f.isPaddingBitField():
continue
self.checkTypeValues('%s.field%d'%(nameLHS,i), '%s.field%d'%(nameRHS,i),
f, output=output, indent=indent)
if t.isUnion:
break
elif isinstance(t, ComplexType):
self.checkTypeValues('(__real %s)'%nameLHS, '(__real %s)'%nameRHS, t.elementType, output=output,indent=indent)
self.checkTypeValues('(__imag %s)'%nameLHS, '(__imag %s)'%nameRHS, t.elementType, output=output,indent=indent)
elif isinstance(t, ArrayType):
for i in range(t.numElements):
# Access in this fashion as a hackish way to portably
# access vectors.
if t.isVector:
self.checkTypeValues('((%s*) &%s)[%d]'%(t.elementType,nameLHS,i),
'((%s*) &%s)[%d]'%(t.elementType,nameRHS,i),
t.elementType, output=output,indent=indent)
else:
self.checkTypeValues('%s[%d]'%(nameLHS,i), '%s[%d]'%(nameRHS,i),
t.elementType, output=output,indent=indent)
else:
raise NotImplementedError,'Cannot print value of type: "%s"'%(t,)
import sys
def main():
from optparse import OptionParser, OptionGroup
parser = OptionParser("%prog [options] {indices}")
parser.add_option("", "--mode", dest="mode",
help="autogeneration mode (random or linear) [default %default]",
type='choice', choices=('random','linear'), default='linear')
parser.add_option("", "--count", dest="count",
help="autogenerate COUNT functions according to MODE",
type=int, default=0)
parser.add_option("", "--min", dest="minIndex", metavar="N",
help="start autogeneration with the Nth function type [default %default]",
type=int, default=0)
parser.add_option("", "--max", dest="maxIndex", metavar="N",
help="maximum index for random autogeneration [default %default]",
type=int, default=10000000)
parser.add_option("", "--seed", dest="seed",
help="random number generator seed [default %default]",
type=int, default=1)
parser.add_option("", "--use-random-seed", dest="useRandomSeed",
help="use random value for initial random number generator seed",
action='store_true', default=False)
parser.add_option("", "--skip", dest="skipTests",
help="add a test index to skip",
type=int, action='append', default=[])
parser.add_option("-o", "--output", dest="output", metavar="FILE",
help="write output to FILE [default %default]",
type=str, default='-')
parser.add_option("-O", "--output-header", dest="outputHeader", metavar="FILE",
help="write header file for output to FILE [default %default]",
type=str, default=None)
parser.add_option("-T", "--output-tests", dest="outputTests", metavar="FILE",
help="write function tests to FILE [default %default]",
type=str, default=None)
parser.add_option("-D", "--output-driver", dest="outputDriver", metavar="FILE",
help="write test driver to FILE [default %default]",
type=str, default=None)
parser.add_option("", "--test-layout", dest="testLayout", metavar="FILE",
help="test structure layout",
action='store_true', default=False)
group = OptionGroup(parser, "Type Enumeration Options")
# Builtins - Ints
group.add_option("", "--no-char", dest="useChar",
help="do not generate char types",
action="store_false", default=True)
group.add_option("", "--no-short", dest="useShort",
help="do not generate short types",
action="store_false", default=True)
group.add_option("", "--no-int", dest="useInt",
help="do not generate int types",
action="store_false", default=True)
group.add_option("", "--no-long", dest="useLong",
help="do not generate long types",
action="store_false", default=True)
group.add_option("", "--no-long-long", dest="useLongLong",
help="do not generate long long types",
action="store_false", default=True)
group.add_option("", "--no-unsigned", dest="useUnsigned",
help="do not generate unsigned integer types",
action="store_false", default=True)
# Other builtins
group.add_option("", "--no-bool", dest="useBool",
help="do not generate bool types",
action="store_false", default=True)
group.add_option("", "--no-float", dest="useFloat",
help="do not generate float types",
action="store_false", default=True)
group.add_option("", "--no-double", dest="useDouble",
help="do not generate double types",
action="store_false", default=True)
group.add_option("", "--no-long-double", dest="useLongDouble",
help="do not generate long double types",
action="store_false", default=True)
group.add_option("", "--no-void-pointer", dest="useVoidPointer",
help="do not generate void* types",
action="store_false", default=True)
# Enumerations
group.add_option("", "--no-enums", dest="useEnum",
help="do not generate enum types",
action="store_false", default=True)
# Derived types
group.add_option("", "--no-array", dest="useArray",
help="do not generate record types",
action="store_false", default=True)
group.add_option("", "--no-complex", dest="useComplex",
help="do not generate complex types",
action="store_false", default=True)
group.add_option("", "--no-record", dest="useRecord",
help="do not generate record types",
action="store_false", default=True)
group.add_option("", "--no-union", dest="recordUseUnion",
help="do not generate union types",
action="store_false", default=True)
group.add_option("", "--no-vector", dest="useVector",
help="do not generate vector types",
action="store_false", default=True)
group.add_option("", "--no-bit-field", dest="useBitField",
help="do not generate bit-field record members",
action="store_false", default=True)
group.add_option("", "--no-builtins", dest="useBuiltins",
help="do not use any types",
action="store_false", default=True)
# Tuning
group.add_option("", "--no-function-return", dest="functionUseReturn",
help="do not generate return types for functions",
action="store_false", default=True)
group.add_option("", "--vector-types", dest="vectorTypes",
help="comma separated list of vector types (e.g., v2i32) [default %default]",
action="store", type=str, default='v2i16, v1i64, v2i32, v4i16, v8i8, v2f32, v2i64, v4i32, v8i16, v16i8, v2f64, v4f32, v16f32', metavar="N")
group.add_option("", "--bit-fields", dest="bitFields",
help="comma separated list 'type:width' bit-field specifiers [default %default]",
action="store", type=str, default=(
"char:0,char:4,int:0,unsigned:1,int:1,int:4,int:13,int:24"))
group.add_option("", "--max-args", dest="functionMaxArgs",
help="maximum number of arguments per function [default %default]",
action="store", type=int, default=4, metavar="N")
group.add_option("", "--max-array", dest="arrayMaxSize",
help="maximum array size [default %default]",
action="store", type=int, default=4, metavar="N")
group.add_option("", "--max-record", dest="recordMaxSize",
help="maximum number of fields per record [default %default]",
action="store", type=int, default=4, metavar="N")
group.add_option("", "--max-record-depth", dest="recordMaxDepth",
help="maximum nested structure depth [default %default]",
action="store", type=int, default=None, metavar="N")
parser.add_option_group(group)
(opts, args) = parser.parse_args()
if not opts.useRandomSeed:
random.seed(opts.seed)
# Construct type generator
builtins = []
if opts.useBuiltins:
ints = []
if opts.useChar: ints.append(('char',1))
if opts.useShort: ints.append(('short',2))
if opts.useInt: ints.append(('int',4))
# FIXME: Wrong size.
if opts.useLong: ints.append(('long',4))
if opts.useLongLong: ints.append(('long long',8))
if opts.useUnsigned:
ints = ([('unsigned %s'%i,s) for i,s in ints] +
[('signed %s'%i,s) for i,s in ints])
builtins.extend(ints)
if opts.useBool: builtins.append(('_Bool',1))
if opts.useFloat: builtins.append(('float',4))
if opts.useDouble: builtins.append(('double',8))
if opts.useLongDouble: builtins.append(('long double',16))
# FIXME: Wrong size.
if opts.useVoidPointer: builtins.append(('void*',4))
btg = FixedTypeGenerator([BuiltinType(n,s) for n,s in builtins])
bitfields = []
for specifier in opts.bitFields.split(','):
if not specifier.strip():
continue
name,width = specifier.strip().split(':', 1)
bitfields.append(BuiltinType(name,None,int(width)))
bftg = FixedTypeGenerator(bitfields)
charType = BuiltinType('char',1)
shortType = BuiltinType('short',2)
intType = BuiltinType('int',4)
longlongType = BuiltinType('long long',8)
floatType = BuiltinType('float',4)
doubleType = BuiltinType('double',8)
sbtg = FixedTypeGenerator([charType, intType, floatType, doubleType])
atg = AnyTypeGenerator()
artg = AnyTypeGenerator()
def makeGenerator(atg, subgen, subfieldgen, useRecord, useArray, useBitField):
atg.addGenerator(btg)
if useBitField and opts.useBitField:
atg.addGenerator(bftg)
if useRecord and opts.useRecord:
assert subgen
atg.addGenerator(RecordTypeGenerator(subfieldgen, opts.recordUseUnion,
opts.recordMaxSize))
if opts.useComplex:
# FIXME: Allow overriding builtins here
atg.addGenerator(ComplexTypeGenerator(sbtg))
if useArray and opts.useArray:
assert subgen
atg.addGenerator(ArrayTypeGenerator(subgen, opts.arrayMaxSize))
if opts.useVector:
vTypes = []
for i,t in enumerate(opts.vectorTypes.split(',')):
m = re.match('v([1-9][0-9]*)([if][1-9][0-9]*)', t.strip())
if not m:
parser.error('Invalid vector type: %r' % t)
count,kind = m.groups()
count = int(count)
type = { 'i8' : charType,
'i16' : shortType,
'i32' : intType,
'i64' : longlongType,
'f32' : floatType,
'f64' : doubleType,
}.get(kind)
if not type:
parser.error('Invalid vector type: %r' % t)
vTypes.append(ArrayType(i, True, type, count * type.size))
atg.addGenerator(FixedTypeGenerator(vTypes))
if opts.useEnum:
atg.addGenerator(EnumTypeGenerator([None, '-1', '1', '1u'], 1, 4))
if opts.recordMaxDepth is None:
# Fully recursive, just avoid top-level arrays.
subFTG = AnyTypeGenerator()
subTG = AnyTypeGenerator()
atg = AnyTypeGenerator()
makeGenerator(subFTG, atg, atg, True, True, True)
makeGenerator(subTG, atg, subFTG, True, True, False)
makeGenerator(atg, subTG, subFTG, True, False, False)
else:
# Make a chain of type generators, each builds smaller
# structures.
base = AnyTypeGenerator()
fbase = AnyTypeGenerator()
makeGenerator(base, None, None, False, False, False)
makeGenerator(fbase, None, None, False, False, True)
for i in range(opts.recordMaxDepth):
n = AnyTypeGenerator()
fn = AnyTypeGenerator()
makeGenerator(n, base, fbase, True, True, False)
makeGenerator(fn, base, fbase, True, True, True)
base = n
fbase = fn
atg = AnyTypeGenerator()
makeGenerator(atg, base, fbase, True, False, False)
if opts.testLayout:
ftg = atg
else:
ftg = FunctionTypeGenerator(atg, opts.functionUseReturn, opts.functionMaxArgs)
# Override max,min,count if finite
if opts.maxIndex is None:
if ftg.cardinality is aleph0:
opts.maxIndex = 10000000
else:
opts.maxIndex = ftg.cardinality
opts.maxIndex = min(opts.maxIndex, ftg.cardinality)
opts.minIndex = max(0,min(opts.maxIndex-1, opts.minIndex))
if not opts.mode=='random':
opts.count = min(opts.count, opts.maxIndex-opts.minIndex)
if opts.output=='-':
output = sys.stdout
else:
output = open(opts.output,'w')
atexit.register(lambda: output.close())
outputHeader = None
if opts.outputHeader:
outputHeader = open(opts.outputHeader,'w')
atexit.register(lambda: outputHeader.close())
outputTests = None
if opts.outputTests:
outputTests = open(opts.outputTests,'w')
atexit.register(lambda: outputTests.close())
outputDriver = None
if opts.outputDriver:
outputDriver = open(opts.outputDriver,'w')
atexit.register(lambda: outputDriver.close())
info = ''
info += '// %s\n'%(' '.join(sys.argv),)
info += '// Generated: %s\n'%(time.strftime('%Y-%m-%d %H:%M'),)
info += '// Cardinality of function generator: %s\n'%(ftg.cardinality,)
info += '// Cardinality of type generator: %s\n'%(atg.cardinality,)
if opts.testLayout:
info += '\n#include <stdio.h>'
P = TypePrinter(output,
outputHeader=outputHeader,
outputTests=outputTests,
outputDriver=outputDriver,
headerName=opts.outputHeader,
info=info)
def write(N):
try:
FT = ftg.get(N)
except RuntimeError,e:
if e.args[0]=='maximum recursion depth exceeded':
print >>sys.stderr,'WARNING: Skipped %d, recursion limit exceeded (bad arguments?)'%(N,)
return
raise
if opts.testLayout:
P.writeLayoutTest(N, FT)
else:
P.writeFunction(N, FT)
if args:
[write(int(a)) for a in args]
skipTests = set(opts.skipTests)
for i in range(opts.count):
if opts.mode=='linear':
index = opts.minIndex + i
else:
index = opts.minIndex + int((opts.maxIndex-opts.minIndex) * random.random())
if index in skipTests:
continue
write(index)
P.finish()
if __name__=='__main__':
main()
| lgpl-2.1 | 1,692,771,722,170,362,000 | 43.766369 | 170 | 0.528239 | false |
holzman/glideinwms-old | creation/lib/cWParams.py | 1 | 17933 | #
# Project:
# glideinWMS
#
# File Version:
#
# Description:
# This module contains the generic params classes
#
# Extracted from:
# cgWParams.py
#
# Author:
# Igor Sfiligoi
#
import os
import copy
import sys
import os.path
import string
import socket
import types
import traceback
from glideinwms.lib import xmlParse
import xml.parsers.expat
from glideinwms.lib import xmlFormat
class SubParams:
def __init__(self,data):
self.data=data
def __eq__(self,other):
if other is None:
return False
if not isinstance(other,self.__class__):
return False
return self.data==other.data
# make data elements look like class attributes
def __getattr__(self,name):
return self.get_el(name)
# make data elements look like a dictionary
def keys(self):
return self.data.keys()
def has_key(self,name):
return self.data.has_key(name)
def __getitem__(self,name):
return self.get_el(name)
def __repr__(self):
return str(self.data)
def __str__(self):
return str(self.data)
#
# PROTECTED
#
# validate input against the base template (i.e. the defaults)
def validate(self,base,path_text):
for k in self.data.keys():
if not base.has_key(k):
# element not in base, report
raise RuntimeError, "Unknown parameter %s.%s"%(path_text,k)
else:
# verify subelements, if any
defel=base[k]
if isinstance(defel,xmlParse.OrderedDict):
# subdictionary
self[k].validate(defel,"%s.%s"%(path_text,k))
else:
# final element
defvalue,ktype,txt,subdef=defel
if isinstance(defvalue,xmlParse.OrderedDict):
# dictionary el elements
data_el=self[k]
for data_subkey in data_el.keys():
data_el[data_subkey].validate(subdef,"%s.%s.%s"%(path_text,k,data_subkey))
elif type(defvalue)==type([]):
# list of elements
if isinstance(self.data[k],xmlParse.OrderedDict):
if len(self.data[k].keys())==0:
self.data[k]=[] #XML does not know if an empty list is a dictionary or not.. fix this
mylist=self[k]
if type(mylist)!=type([]):
raise RuntimeError, "Parameter %s.%s not a list: %s %s"%(path_text,k,type(mylist),mylist)
for data_el in mylist:
data_el.validate(subdef,"%s.*.%s"%(path_text,k))
else:
# a simple value
pass #nothing to be done
# put default values where there is nothing
def use_defaults(self,defaults):
for k in defaults.keys():
defel=defaults[k]
if isinstance(defel,xmlParse.OrderedDict):
# subdictionary
if not self.data.has_key(k):
self.data[k]=xmlParse.OrderedDict() # first create empty, if does not exist
# then, set defaults on all elements of subdictionary
self[k].use_defaults(defel)
else:
# final element
defvalue,ktype,txt,subdef=defel
if isinstance(defvalue,xmlParse.OrderedDict):
# dictionary el elements
if not self.data.has_key(k):
self.data[k]=xmlParse.OrderedDict() # no elements yet, set and empty dictionary
else:
# need to set defaults on all elements in the dictionary
data_el=self[k]
for data_subkey in data_el.keys():
data_el[data_subkey].use_defaults(subdef)
elif type(defvalue)==type([]):
# list of elements
if not self.data.has_key(k):
self.data[k]=[] # no elements yet, set and empty list
else:
# need to set defaults on all elements in the list
for data_el in self[k]:
data_el.use_defaults(subdef)
else:
# a simple value
if not self.data.has_key(k):
self.data[k]=copy.deepcopy(defvalue)
# else nothing to do, already set
#
# PRIVATE
#
def get_el(self,name):
el=self.data[name]
if isinstance(el,xmlParse.OrderedDict):
return self.__class__(el)
elif type(el)==type([]):
outlst=[]
for k in el:
if isinstance(k,xmlParse.OrderedDict):
outlst.append(self.__class__(k))
else:
outlst.append(k)
return outlst
else:
return el
# abstract class
# children must define
# get_top_element(self)
# init_defaults(self)
# derive(self)
# get_xml_format(self)
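# An illustrative sketch (not part of the original module) of what a minimal
# child class satisfying this contract might look like; all names below are
# hypothetical:
#   class ExampleParams(Params):
#       def get_top_element(self):
#           return "example"
#       def init_defaults(self):
#           self.defaults["attrs"] = xmlParse.OrderedDict()
#       def derive(self):
#           pass  # no derived values
#       def get_xml_format(self):
#           return {'lists_params': {}, 'dicts_params': {}}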
class Params:
def __init__(self,usage_prefix,src_dir,argv):
self.usage_prefix=usage_prefix
# support dir
self.src_dir=src_dir
# initialize the defaults
self.defaults=xmlParse.OrderedDict()
self.init_defaults()
try:
if len(argv)<2:
raise RuntimeError, "Missing config file"
if argv[1]=="-help":
raise RuntimeError,"\nA config file will contain:\n%s\n\nThe config file will be in XML format."%self.get_description(" ")
self.cfg_name=os.path.abspath(argv[1])
self.load_file(self.cfg_name)
self.subparams.validate(self.defaults,self.get_top_element())
# make a copy of the loaded data, so that I can always tell what was derived and what was not
self.org_data=copy.deepcopy(self.data)
self.subparams.use_defaults(self.defaults)
# create derived values
self.derive()
except RuntimeError, e:
raise RuntimeError,"Unexpected error occurred loading the configuration file.\n\n%s" % e
pass
def derive(self):
return # by default nothing... children should overwrite this
def get_xml(self):
old_default_ignore_nones=xmlFormat.DEFAULT_IGNORE_NONES
old_default_lists_params=xmlFormat.DEFAULT_LISTS_PARAMS
old_default_dicts_params=xmlFormat.DEFAULT_DICTS_PARAMS
xmlFormat.DEFAULT_IGNORE_NONES=True
# these are used internally, do not need to be ordered
xml_format=self.get_xml_format()
xmlFormat.DEFAULT_LISTS_PARAMS=xml_format['lists_params']
xmlFormat.DEFAULT_DICTS_PARAMS=xml_format['dicts_params']
# hack needed to make xmlFormat do the formatting properly
old_DictType=types.DictType
types.DictType=type(xmlParse.OrderedDict())
out=xmlFormat.class2string(self.data,self.get_top_element())
types.DictType=old_DictType
xmlFormat.DEFAULT_IGNORE_NONES=old_default_ignore_nones
xmlFormat.DEFAULT_LISTS_PARAMS=old_default_lists_params
xmlFormat.DEFAULT_DICTS_PARAMS=old_default_dicts_params
return out
def get_description(self,indent="",width=80):
return defdict2string(self.defaults,indent,width)
#load from a file
#one element per line
# -opt val
def load_file(self,fname):
if fname=="-":
fname=sys.stdin
try:
self.data=xmlParse.xmlfile2dict(fname,use_ord_dict=True)
except xml.parsers.expat.ExpatError, e:
raise RuntimeError, "XML error parsing config file: %s"%e
except IOError, e:
raise RuntimeError, "Config file error: %s"%e
self.subparams=self.get_subparams_class()(self.data)
return
def __eq__(self,other):
if other is None:
return False
if not isinstance(other,Params):
return False
return self.subparams==other.subparams
def __getattr__(self,name):
return self.subparams.__getattr__(name)
#save into a file
#The file should be usable for reload
def save_into_file(self,fname,set_ro=False):
fd=open(fname,"w")
try:
fd.write(self.get_xml())
fd.write("\n")
finally:
fd.close()
if set_ro:
os.chmod(fname,os.stat(fname)[0]&0444)
return
#save into a file (making a backup)
#The file should be usable for reload
def save_into_file_wbackup(self,fname,set_ro=False):
# rewrite config file (write tmp file first)
tmp_name="%s.tmp"%fname
try:
os.unlink(tmp_name)
except:
pass # just protect
self.save_into_file(tmp_name)
# also save old one with backup name
backup_name="%s~"%fname
try:
os.unlink(backup_name)
except:
pass # just protect
try:
os.rename(fname,backup_name)
# make it user writable
os.chmod(backup_name,(os.stat(backup_name)[0]&0666)|0200)
except:
pass # just protect
# finally rename to the proper name
os.rename(tmp_name,fname)
if set_ro:
os.chmod(fname,os.stat(fname)[0]&0444)
# used internally to define subtype class
def get_subparams_class(self):
return SubParams
######################################################
# Ordered dictionary with comment support
class commentedOrderedDict(xmlParse.OrderedDict):
def __init__(self, dict = None):
# cannot call the parent directly due to its particular implementation restrictions
self._keys = []
xmlParse.UserDict.__init__(self, dict)
self["comment"]=(None,"string","Humman comment, not used by the code",None)
####################################################################
# INTERNAL, don't use directly
# Use the class definition instead
#
# return attribute value in the proper python format
def extract_attr_val(attr_obj):
if (not attr_obj.type in ("string","int","expr")):
raise RuntimeError, "Wrong attribute type '%s', must be either 'int' or 'string'"%attr_obj.type
if attr_obj.type in ("string","expr"):
return str(attr_obj.value)
else:
return int(attr_obj.value)
######################################################
# Define common defaults
class CommonSubParams(SubParams):
# return attribute value in the proper python format
def extract_attr_val(self,attr_obj):
return extract_attr_val(attr_obj)
class CommonParams(Params):
# populate self.defaults
def init_support_defaults(self):
# attributes are generic, shared between frontend and factory
self.attr_defaults=commentedOrderedDict()
self.attr_defaults["value"]=(None,"Value","Value of the attribute (string)",None)
self.attr_defaults["parameter"]=("True","Bool","Should it be passed as a parameter?",None)
self.attr_defaults["glidein_publish"]=("False","Bool","Should it be published by the glidein? (Used only if parameter is True.)",None)
self.attr_defaults["job_publish"]=("False","Bool","Should the glidein publish it to the job? (Used only if parameter is True.)",None)
self.attr_defaults["type"]=["string","string|int","What kind on data is value.",None]
# most file attributes are generic, shared between frontend and factory
self.file_defaults=commentedOrderedDict()
self.file_defaults["absfname"]=(None,"fname","File name on the local disk.",None)
self.file_defaults["relfname"]=(None,"fname","Name of the file once it gets to the worker node. (defaults to the last part of absfname)",None)
self.file_defaults["const"]=("True","Bool","Will the file be constant? If True, the file will be signed. If False, it can be modified at any time and will not be cached.",None)
self.file_defaults["executable"]=("False",'Bool','Is this an executable that needs to be run in the glidein?',None)
self.file_defaults["wrapper"]=("False",'Bool','Is this a wrapper script that needs to be sourced in the glidein job wrapper?',None)
self.file_defaults["untar"]=("False",'Bool','Do I need to untar it? ',None)
untar_defaults=commentedOrderedDict()
untar_defaults["cond_attr"]=("TRUE","attrname","If not the special value TRUE, the attribute name used at runtime to determine if the file should be untarred or not.",None)
untar_defaults["dir"]=(None,"dirname","Subdirectory in which to untar. (defaults to relname up to first .)",None)
untar_defaults["absdir_outattr"]=(None,"attrname",'Attribute to be set to the abs dir name where the tarball was unpacked. Will be defined only if untar effectively done. (Not defined if None)',None)
self.file_defaults["untar_options"]=untar_defaults
self.monitor_defaults=commentedOrderedDict()
self.monitor_defaults["javascriptRRD_dir"]=(os.path.join(self.src_dir,"../../externals/flot"),"base_dir","Location of the javascriptRRD library.",None)
self.monitor_defaults["flot_dir"]=(os.path.join(self.src_dir,"../../externals/flot"),"base_dir","Location of the flot library.",None)
self.monitor_defaults["jquery_dir"]=(os.path.join(self.src_dir,"../../externals/jquery"),"base_dir","Location of the jquery library.",None)
return
def get_subparams_class(self):
return CommonSubParams
# return attribute value in the proper python format
def extract_attr_val(self,attr_obj):
return extract_attr_val(attr_obj)
################################################
# Check if a string can be used as a valid name
# Whitelist based:
# only allow ASCII letters, digits and a few punctuation characters
# no spaces, no special characters or other punctuation
VALID_NAME_CHARS=string.ascii_letters+string.digits+'._-'
def is_valid_name(name):
# empty name is not valid
if name is None:
return False
if name=="":
return False
for c in name:
if not (c in VALID_NAME_CHARS):
return False
return True
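# For instance, is_valid_name("entry_GRID-1.prod") returns True, while
# is_valid_name("bad name!") and is_valid_name("") both return False.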
############################################################
#
# P R I V A T E - Do not use
#
############################################################
#######################################################
# Wrap a text string to a fixed length
def col_wrap(text,width,indent):
short_text,next_char=shorten_text(text,width)
if len(short_text)!=len(text): # was shortened
#print short_text
org_short_text=short_text[0:]
# make sure you are not breaking words.
while not (next_char in ('',' ','\t')):
if len(short_text)==0:
# could not break on word boundary, leave as is
short_text=org_short_text
break
next_char=short_text[-1]
short_text=short_text[:-1]
if len(short_text)<=len(indent):
# too short, just split as it was
short_text=org_short_text
# calc next lines
subtext=col_wrap(indent+text[len(short_text):].lstrip(' \t'),width,indent)
# glue
return short_text+"\n"+subtext
else:
return text
# shorten text, making sure tabs are properly accounted for
# return (shorten text,next char)
def shorten_text(text,width):
count=0
idx=0
for c in text:
if count>=width:
return (text[:idx],c)
if c=='\t':
count=((count+8)/8)*8 # advance to the next multiple of 8 (tab stop)
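# e.g. a tab encountered at column 3 advances the count to 8, and one at
# column 8 advances it to 16 (integer division under Python 2)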
if count>width:
return (text[:idx],c)
idx=idx+1
else:
count=count+1
idx=idx+1
return (text[:idx],'')
##################################################
# convert defaults to a string
def defdict2string(defaults,indent,width=80):
outstrarr=[]
keys=defaults.keys()
keys.sort()
final_keys=[]
# put simple elements first
for k in keys:
el=defaults[k]
if not isinstance(el,xmlParse.OrderedDict):
defvalue,ktype,txt,subdef=el
if subdef is None:
final_keys.append(k)
# then add the complex elements (subdictionaries and elements with sub-definitions)
for k in keys:
el=defaults[k]
if isinstance(el,xmlParse.OrderedDict):
final_keys.append(k)
else:
defvalue,ktype,txt,subdef=el
if subdef is not None:
final_keys.append(k)
for k in final_keys:
el=defaults[k]
if isinstance(el,xmlParse.OrderedDict): #sub-dictionary
outstrarr.append("%s%s:"%(indent,k)+"\n"+defdict2string(el,indent+"\t",width))
else:
#print el
defvalue,ktype,txt,subdef=el
wrap_indent=indent+string.ljust("",len("%s(%s) - "%(k,ktype)))
if subdef is not None:
if isinstance(defvalue,xmlParse.OrderedDict):
dict_subdef=copy.deepcopy(subdef)
dict_subdef["name"]=(None,"name","Name",None)
outstrarr.append(col_wrap("%s%s(%s) - %s:"%(indent,k,ktype,txt),width,wrap_indent)+"\n"+defdict2string(dict_subdef,indent+"\t",width))
else:
outstrarr.append(col_wrap("%s%s(%s) - %s:"%(indent,k,ktype,txt),width,wrap_indent)+"\n"+defdict2string(subdef,indent+"\t",width))
else:
outstrarr.append(col_wrap("%s%s(%s) - %s [%s]"%(indent,k,ktype,txt,defvalue),width,wrap_indent))
return string.join(outstrarr,"\n")
| bsd-3-clause | -8,056,964,093,119,132,000 | 36.595388 | 207 | 0.568951 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_11_01/aio/operations/_vpn_connections_operations.py | 1 | 21995 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VpnConnectionsOperations:
"""VpnConnectionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
resource_group_name: str,
gateway_name: str,
connection_name: str,
**kwargs
) -> "_models.VpnConnection":
"""Retrieves the details of a vpn connection.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:param connection_name: The name of the vpn connection.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VpnConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_11_01.models.VpnConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('VpnConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections/{connectionName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
gateway_name: str,
connection_name: str,
vpn_connection_parameters: "_models.VpnConnection",
**kwargs
) -> "_models.VpnConnection":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(vpn_connection_parameters, 'VpnConnection')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VpnConnection', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VpnConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections/{connectionName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
gateway_name: str,
connection_name: str,
vpn_connection_parameters: "_models.VpnConnection",
**kwargs
) -> AsyncLROPoller["_models.VpnConnection"]:
"""Creates a vpn connection to a scalable vpn gateway if it doesn't exist else updates the
existing connection.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:param connection_name: The name of the connection.
:type connection_name: str
:param vpn_connection_parameters: Parameters supplied to create or Update a VPN Connection.
:type vpn_connection_parameters: ~azure.mgmt.network.v2018_11_01.models.VpnConnection
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VpnConnection or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_11_01.models.VpnConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnConnection"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
connection_name=connection_name,
vpn_connection_parameters=vpn_connection_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections/{connectionName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
gateway_name: str,
connection_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections/{connectionName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
gateway_name: str,
connection_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes a vpn connection.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:param connection_name: The name of the connection.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
connection_name=connection_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections/{connectionName}'} # type: ignore
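    # Illustrative call pattern for the poller returned above (sketch; client
    # construction and the ``vpn_connections`` attribute name are assumptions):
    #   poller = await network_client.vpn_connections.begin_delete(
    #       resource_group_name, gateway_name, connection_name)
    #   await poller.result()  # returns once the delete LRO completes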
def list_by_vpn_gateway(
self,
resource_group_name: str,
gateway_name: str,
**kwargs
) -> AsyncIterable["_models.ListVpnConnectionsResult"]:
"""Retrieves all vpn connections for a particular virtual wan vpn gateway.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListVpnConnectionsResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_11_01.models.ListVpnConnectionsResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListVpnConnectionsResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_vpn_gateway.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ListVpnConnectionsResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.Error, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_vpn_gateway.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections'} # type: ignore
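    # Illustrative iteration over the returned pager (sketch; client setup and
    # the ``vpn_connections`` attribute name are assumptions):
    #   async for connection in network_client.vpn_connections.list_by_vpn_gateway(
    #           resource_group_name, gateway_name):
    #       print(connection.name)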
| mit | -6,423,437,896,381,666,000 | 49.563218 | 220 | 0.648284 | false |
yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/sqlvm/__init__.py | 5 | 1864 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core import AzCommandsLoader
import azure.cli.command_modules.sqlvm._help # pylint: disable=unused-import
# pylint: disable=line-too-long
class SqlVmCommandsLoader(AzCommandsLoader):
def __init__(self, cli_ctx=None):
from azure.cli.core import ModExtensionSuppress
from azure.cli.core.commands import CliCommandType
from azure.cli.core.profiles import ResourceType
sqlvm_custom = CliCommandType(operations_tmpl='azure.cli.command_modules.sqlvm.custom#{}')
super(SqlVmCommandsLoader, self).__init__(cli_ctx=cli_ctx,
custom_command_type=sqlvm_custom,
resource_type=ResourceType.MGMT_SQLVM,
suppress_extension=ModExtensionSuppress(__name__, 'sqlvm-preview', '0.1.0',
reason='These commands are now in the CLI.',
recommend_remove=True))
def load_command_table(self, args):
from azure.cli.command_modules.sqlvm.commands import load_command_table
load_command_table(self, args)
return self.command_table
def load_arguments(self, command):
from azure.cli.command_modules.sqlvm._params import load_arguments
load_arguments(self, command)
COMMAND_LOADER_CLS = SqlVmCommandsLoader
| mit | -4,389,754,064,148,998,700 | 49.378378 | 134 | 0.534871 | false |
Qalthos/ansible | lib/ansible/executor/module_common.py | 5 | 50899 | # (c) 2013-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2015 Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import base64
import datetime
import json
import os
import shlex
import zipfile
import re
import pkgutil
from io import BytesIO
from ansible.release import __version__, __author__
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.executor.interpreter_discovery import InterpreterDiscoveryRequiredError
from ansible.executor.powershell import module_manifest as ps_manifest
from ansible.module_utils._text import to_bytes, to_text, to_native
from ansible.plugins.loader import module_utils_loader
# Must import strategy and use write_locks from there
# If we import write_locks directly then we end up binding a
# variable to the object and then it never gets updated.
from ansible.executor import action_write_locks
from ansible.utils.display import Display
try:
import importlib.util
import importlib.machinery
imp = None
except ImportError:
import imp
# HACK: keep Python 2.6 controller tests happy in CI until they're properly split
try:
from importlib import import_module
except ImportError:
import_module = __import__
# if we're on a Python that doesn't have FNFError, redefine it as IOError (since that's what we'll see)
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
display = Display()
REPLACER = b"#<<INCLUDE_ANSIBLE_MODULE_COMMON>>"
REPLACER_VERSION = b"\"<<ANSIBLE_VERSION>>\""
REPLACER_COMPLEX = b"\"<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>\""
REPLACER_WINDOWS = b"# POWERSHELL_COMMON"
REPLACER_JSONARGS = b"<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>"
REPLACER_SELINUX = b"<<SELINUX_SPECIAL_FILESYSTEMS>>"
# We could end up writing out parameters with unicode characters so we need to
# specify an encoding for the python source file
ENCODING_STRING = u'# -*- coding: utf-8 -*-'
b_ENCODING_STRING = b'# -*- coding: utf-8 -*-'
# module_common is relative to module_utils, so fix the path
_MODULE_UTILS_PATH = os.path.join(os.path.dirname(__file__), '..', 'module_utils')
# ******************************************************************************
ANSIBALLZ_TEMPLATE = u'''%(shebang)s
%(coding)s
_ANSIBALLZ_WRAPPER = True # For test-module script to tell this is an ANSIBALLZ_WRAPPER
# This code is part of Ansible, but is an independent component.
# The code in this particular templatable string, and this templatable string
# only, is BSD licensed. Modules which end up using this snippet, which is
# dynamically combined together by Ansible still belong to the author of the
# module, and they may assign their own license to the complete work.
#
# Copyright (c), James Cammarata, 2016
# Copyright (c), Toshio Kuratomi, 2016
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def _ansiballz_main():
%(rlimit)s
import os
import os.path
import sys
import __main__
# For some distros and python versions we pick up this script in the temporary
# directory. This leads to problems when the ansible module masks a python
# library that another import needs. We have not figured out what about the
# specific distros and python versions causes this to behave differently.
#
# Tested distros:
# Fedora23 with python3.4 Works
# Ubuntu15.10 with python2.7 Works
# Ubuntu15.10 with python3.4 Fails without this
# Ubuntu16.04.1 with python3.5 Fails without this
# To test on another platform:
# * use the copy module (since this shadows the stdlib copy module)
# * Turn off pipelining
# * Make sure that the destination file does not exist
# * ansible ubuntu16-test -m copy -a 'src=/etc/motd dest=/var/tmp/m'
# This will traceback in shutil. Looking at the complete traceback will show
# that shutil is importing copy which finds the ansible module instead of the
# stdlib module
scriptdir = None
try:
scriptdir = os.path.dirname(os.path.realpath(__main__.__file__))
except (AttributeError, OSError):
# Some platforms don't set __file__ when reading from stdin
# OSX raises OSError if using abspath() in a directory we don't have
# permission to read (realpath calls abspath)
pass
if scriptdir is not None:
sys.path = [p for p in sys.path if p != scriptdir]
import base64
import shutil
import tempfile
import zipfile
if sys.version_info < (3,):
# imp is used on Python<3
import imp
bytes = str
MOD_DESC = ('.py', 'U', imp.PY_SOURCE)
PY3 = False
else:
# importlib is only used on Python>=3
import importlib.util
unicode = str
PY3 = True
ZIPDATA = """%(zipdata)s"""
# Note: temp_path isn't needed once we switch to zipimport
def invoke_module(modlib_path, temp_path, json_params):
# When installed via setuptools (including python setup.py install),
# ansible may be installed with an easy-install.pth file. That file
# may load the system-wide install of ansible rather than the one in
# the module. sitecustomize is the only way to override that setting.
z = zipfile.ZipFile(modlib_path, mode='a')
# py3: modlib_path will be text, py2: it's bytes. Need bytes at the end
sitecustomize = u'import sys\\nsys.path.insert(0,"%%s")\\n' %% modlib_path
sitecustomize = sitecustomize.encode('utf-8')
# Use a ZipInfo to work around zipfile limitation on hosts with
# clocks set to a pre-1980 year (for instance, Raspberry Pi)
zinfo = zipfile.ZipInfo()
zinfo.filename = 'sitecustomize.py'
zinfo.date_time = ( %(year)i, %(month)i, %(day)i, %(hour)i, %(minute)i, %(second)i)
z.writestr(zinfo, sitecustomize)
# Note: Remove the following section when we switch to zipimport
# Write the module to disk for imp.load_module
module = os.path.join(temp_path, '__main__.py')
with open(module, 'wb') as f:
f.write(z.read('__main__.py'))
f.close()
# End pre-zipimport section
z.close()
# Put the zipped up module_utils we got from the controller first in the python path so that we
# can monkeypatch the right basic
sys.path.insert(0, modlib_path)
# Monkeypatch the parameters into basic
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = json_params
%(coverage)s
# Run the module! By importing it as '__main__', it thinks it is executing as a script
if sys.version_info >= (3,):
spec = importlib.util.spec_from_file_location('__main__', module)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
else:
with open(module, 'rb') as mod:
imp.load_module('__main__', mod, module, MOD_DESC)
# Ansible modules must exit themselves
print('{"msg": "New-style module did not handle its own exit", "failed": true}')
sys.exit(1)
def debug(command, zipped_mod, json_params):
# The code here normally doesn't run. It's only used for debugging on the
# remote machine.
#
# The subcommands in this function make it easier to debug ansiballz
# modules. Here's the basic steps:
#
# Run ansible with the environment variable: ANSIBLE_KEEP_REMOTE_FILES=1 and -vvv
# to save the module file remotely::
# $ ANSIBLE_KEEP_REMOTE_FILES=1 ansible host1 -m ping -a 'data=october' -vvv
#
# Part of the verbose output will tell you where on the remote machine the
# module was written to::
# [...]
# <host1> SSH: EXEC ssh -C -q -o ControlMaster=auto -o ControlPersist=60s -o KbdInteractiveAuthentication=no -o
# PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o ConnectTimeout=10 -o
# ControlPath=/home/badger/.ansible/cp/ansible-ssh-%%h-%%p-%%r -tt rhel7 '/bin/sh -c '"'"'LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8
# LC_MESSAGES=en_US.UTF-8 /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping'"'"''
# [...]
#
# Login to the remote machine and run the module file via from the previous
# step with the explode subcommand to extract the module payload into
# source files::
# $ ssh host1
# $ /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping explode
# Module expanded into:
# /home/badger/.ansible/tmp/ansible-tmp-1461173408.08-279692652635227/ansible
#
# You can now edit the source files to instrument the code or experiment with
# different parameter values. When you're ready to run the code you've modified
# (instead of the code from the actual zipped module), use the execute subcommand like this::
# $ /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping execute
# Okay to use __file__ here because we're running from a kept file
basedir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'debug_dir')
args_path = os.path.join(basedir, 'args')
script_path = os.path.join(basedir, '__main__.py')
if command == 'excommunicate':
print('The excommunicate debug command is deprecated and will be removed in 2.11. Use execute instead.')
command = 'execute'
if command == 'explode':
# transform the ZIPDATA into an exploded directory of code and then
# print the path to the code. This is an easy way for people to look
# at the code on the remote machine for debugging it in that
# environment
z = zipfile.ZipFile(zipped_mod)
for filename in z.namelist():
if filename.startswith('/'):
raise Exception('Something wrong with this module zip file: should not contain absolute paths')
dest_filename = os.path.join(basedir, filename)
if dest_filename.endswith(os.path.sep) and not os.path.exists(dest_filename):
os.makedirs(dest_filename)
else:
directory = os.path.dirname(dest_filename)
if not os.path.exists(directory):
os.makedirs(directory)
f = open(dest_filename, 'wb')
f.write(z.read(filename))
f.close()
# write the args file
f = open(args_path, 'wb')
f.write(json_params)
f.close()
print('Module expanded into:')
print('%%s' %% basedir)
exitcode = 0
elif command == 'execute':
# Execute the exploded code instead of executing the module from the
# embedded ZIPDATA. This allows people to easily run their modified
# code on the remote machine to see how changes will affect it.
# Set pythonpath to the debug dir
sys.path.insert(0, basedir)
# read in the args file which the user may have modified
with open(args_path, 'rb') as f:
json_params = f.read()
# Monkeypatch the parameters into basic
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = json_params
# Run the module! By importing it as '__main__', it thinks it is executing as a script
if PY3:
import importlib.util
spec = importlib.util.spec_from_file_location('__main__', script_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
else:
import imp
with open(script_path, 'r') as f:
imp.load_module('__main__', f, script_path, ('.py', 'r', imp.PY_SOURCE))
# Ansible modules must exit themselves
print('{"msg": "New-style module did not handle its own exit", "failed": true}')
sys.exit(1)
else:
print('WARNING: Unknown debug command. Doing nothing.')
exitcode = 0
return exitcode
#
# See comments in the debug() method for information on debugging
#
ANSIBALLZ_PARAMS = %(params)s
if PY3:
ANSIBALLZ_PARAMS = ANSIBALLZ_PARAMS.encode('utf-8')
try:
# There's a race condition with the controller removing the
# remote_tmpdir and this module executing under async. So we cannot
# store this in remote_tmpdir (use system tempdir instead)
# Only need to use [ansible_module]_payload_ in the temp_path until we move to zipimport
# (this helps ansible-test produce coverage stats)
temp_path = tempfile.mkdtemp(prefix='ansible_%(ansible_module)s_payload_')
zipped_mod = os.path.join(temp_path, 'ansible_%(ansible_module)s_payload.zip')
with open(zipped_mod, 'wb') as modlib:
modlib.write(base64.b64decode(ZIPDATA))
if len(sys.argv) == 2:
exitcode = debug(sys.argv[1], zipped_mod, ANSIBALLZ_PARAMS)
else:
# Note: temp_path isn't needed once we switch to zipimport
invoke_module(zipped_mod, temp_path, ANSIBALLZ_PARAMS)
finally:
try:
shutil.rmtree(temp_path)
except (NameError, OSError):
# tempdir creation probably failed
pass
sys.exit(exitcode)
if __name__ == '__main__':
_ansiballz_main()
'''
ANSIBALLZ_COVERAGE_TEMPLATE = '''
# Access to the working directory is required by coverage.
# Some platforms, such as macOS, may not allow querying the working directory when using become to drop privileges.
try:
os.getcwd()
except OSError:
os.chdir('/')
os.environ['COVERAGE_FILE'] = '%(coverage_output)s'
import atexit
try:
import coverage
except ImportError:
print('{"msg": "Could not import `coverage` module.", "failed": true}')
sys.exit(1)
cov = coverage.Coverage(config_file='%(coverage_config)s')
def atexit_coverage():
cov.stop()
cov.save()
atexit.register(atexit_coverage)
cov.start()
'''
ANSIBALLZ_COVERAGE_CHECK_TEMPLATE = '''
try:
if PY3:
if importlib.util.find_spec('coverage') is None:
raise ImportError
else:
imp.find_module('coverage')
except ImportError:
print('{"msg": "Could not find `coverage` module.", "failed": true}')
sys.exit(1)
'''
ANSIBALLZ_RLIMIT_TEMPLATE = '''
import resource
existing_soft, existing_hard = resource.getrlimit(resource.RLIMIT_NOFILE)
# adjust soft limit subject to existing hard limit
requested_soft = min(existing_hard, %(rlimit_nofile)d)
if requested_soft != existing_soft:
try:
resource.setrlimit(resource.RLIMIT_NOFILE, (requested_soft, existing_hard))
except ValueError:
# some platforms (eg macOS) lie about their hard limit
pass
'''
def _strip_comments(source):
# Strip comments and blank lines from the wrapper
buf = []
for line in source.splitlines():
l = line.strip()
if not l or l.startswith(u'#'):
continue
buf.append(line)
return u'\n'.join(buf)
if C.DEFAULT_KEEP_REMOTE_FILES:
# Keep comments when KEEP_REMOTE_FILES is set. That way users will see
# the comments with some nice usage instructions
ACTIVE_ANSIBALLZ_TEMPLATE = ANSIBALLZ_TEMPLATE
else:
# ANSIBALLZ_TEMPLATE stripped of comments for smaller over the wire size
ACTIVE_ANSIBALLZ_TEMPLATE = _strip_comments(ANSIBALLZ_TEMPLATE)
class ModuleDepFinder(ast.NodeVisitor):
# Caveats:
# This code currently does not handle:
# * relative imports from py2.6+ from . import urls
IMPORT_PREFIX_SIZE = len('ansible.module_utils.')
def __init__(self, *args, **kwargs):
"""
Walk the ast tree for the python module.
Save submodule[.submoduleN][.identifier] into self.submodules
self.submodules will end up with tuples like:
- ('basic',)
- ('urls', 'fetch_url')
- ('database', 'postgres')
- ('database', 'postgres', 'quote')
It's up to calling code to determine whether the final element of the
dotted strings are module names or something else (function, class, or
variable names)
"""
super(ModuleDepFinder, self).__init__(*args, **kwargs)
self.submodules = set()
def visit_Import(self, node):
# import ansible.module_utils.MODLIB[.MODLIBn] [as asname]
for alias in node.names:
if alias.name.startswith('ansible.module_utils.'):
py_mod = alias.name[self.IMPORT_PREFIX_SIZE:]
py_mod = tuple(py_mod.split('.'))
self.submodules.add(py_mod)
elif alias.name.startswith('ansible_collections.'):
# keep 'ansible_collections.' as a sentinel prefix to trigger collection-loaded MU path
self.submodules.add(tuple(alias.name.split('.')))
self.generic_visit(node)
def visit_ImportFrom(self, node):
# Specialcase: six is a special case because of its
# import logic
if node.names[0].name == '_six':
self.submodules.add(('_six',))
elif node.module.startswith('ansible.module_utils'):
where_from = node.module[self.IMPORT_PREFIX_SIZE:]
if where_from:
# from ansible.module_utils.MODULE1[.MODULEn] import IDENTIFIER [as asname]
# from ansible.module_utils.MODULE1[.MODULEn] import MODULEn+1 [as asname]
# from ansible.module_utils.MODULE1[.MODULEn] import MODULEn+1 [,IDENTIFIER] [as asname]
py_mod = tuple(where_from.split('.'))
for alias in node.names:
self.submodules.add(py_mod + (alias.name,))
else:
# from ansible.module_utils import MODLIB [,MODLIB2] [as asname]
for alias in node.names:
self.submodules.add((alias.name,))
elif node.module.startswith('ansible_collections.'):
# TODO: finish out the subpackage et al cases
if node.module.endswith('plugins.module_utils'):
# from ansible_collections.ns.coll.plugins.module_utils import MODULE [as aname] [,MODULE2] [as aname]
py_mod = tuple(node.module.split('.'))
for alias in node.names:
self.submodules.add(py_mod + (alias.name,))
else:
# from ansible_collections.ns.coll.plugins.module_utils.MODULE import IDENTIFIER [as aname]
self.submodules.add(tuple(node.module.split('.')))
self.generic_visit(node)
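# Illustrative use of ModuleDepFinder (sketch; recursive_finder below does this
# for real module sources):
#   tree = ast.parse(b"from ansible.module_utils.basic import AnsibleModule")
#   finder = ModuleDepFinder()
#   finder.visit(tree)
#   # finder.submodules -> {('basic', 'AnsibleModule')}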
def _slurp(path):
if not os.path.exists(path):
raise AnsibleError("imported module support code does not exist at %s" % os.path.abspath(path))
with open(path, 'rb') as fd:
data = fd.read()
return data
def _get_shebang(interpreter, task_vars, templar, args=tuple()):
"""
Note not stellar API:
Returns None instead of always returning a shebang line. Doing it this
way allows the caller to decide to use the shebang it read from the
file rather than trust that we reformatted what they already have
correctly.
"""
interpreter_name = os.path.basename(interpreter).strip()
# FUTURE: add logical equivalence for python3 in the case of py3-only modules
# check for first-class interpreter config
interpreter_config_key = "INTERPRETER_%s" % interpreter_name.upper()
if C.config.get_configuration_definitions().get(interpreter_config_key):
# a config def exists for this interpreter type; consult config for the value
interpreter_out = C.config.get_config_value(interpreter_config_key, variables=task_vars)
discovered_interpreter_config = u'discovered_interpreter_%s' % interpreter_name
interpreter_out = templar.template(interpreter_out.strip())
facts_from_task_vars = task_vars.get('ansible_facts', {})
# handle interpreter discovery if requested
if interpreter_out in ['auto', 'auto_legacy', 'auto_silent', 'auto_legacy_silent']:
if discovered_interpreter_config not in facts_from_task_vars:
# interpreter discovery is desired, but has not been run for this host
raise InterpreterDiscoveryRequiredError("interpreter discovery needed",
interpreter_name=interpreter_name,
discovery_mode=interpreter_out)
else:
interpreter_out = facts_from_task_vars[discovered_interpreter_config]
else:
# a config def does not exist for this interpreter type; consult vars for a possible direct override
interpreter_config = u'ansible_%s_interpreter' % interpreter_name
if interpreter_config not in task_vars:
return None, interpreter
interpreter_out = templar.template(task_vars[interpreter_config].strip())
shebang = u'#!' + interpreter_out
if args:
shebang = shebang + u' ' + u' '.join(args)
return shebang, interpreter_out
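# Example (sketch): for an interpreter type without a first-class config
# definition, task_vars = {'ansible_perl_interpreter': '/usr/bin/perl'} makes
# _get_shebang(u'/usr/bin/perl', task_vars, templar) return
# (u'#!/usr/bin/perl', u'/usr/bin/perl').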
class ModuleInfo:
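    # Thin wrapper around module discovery: importlib.machinery.PathFinder on
    # Python 3 (imp.find_module on Python 2) locates a module_utils module and
    # this records whether it is python source and/or a package directory.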
def __init__(self, name, paths):
self.py_src = False
self.pkg_dir = False
path = None
if imp is None:
self._info = info = importlib.machinery.PathFinder.find_spec(name, paths)
if info is not None:
self.py_src = os.path.splitext(info.origin)[1] in importlib.machinery.SOURCE_SUFFIXES
self.pkg_dir = info.origin.endswith('/__init__.py')
path = info.origin
else:
raise ImportError("No module named '%s'" % name)
else:
self._info = info = imp.find_module(name, paths)
self.py_src = info[2][2] == imp.PY_SOURCE
self.pkg_dir = info[2][2] == imp.PKG_DIRECTORY
if self.pkg_dir:
path = os.path.join(info[1], '__init__.py')
else:
path = info[1]
self.path = path
def get_source(self):
if imp and self.py_src:
try:
return self._info[0].read()
finally:
self._info[0].close()
return _slurp(self.path)
def recursive_finder(name, data, py_module_names, py_module_cache, zf):
"""
Using ModuleDepFinder, make sure we have all of the module_utils files that
the module its module_utils files needs.
"""
# Parse the module and find the imports of ansible.module_utils
try:
tree = ast.parse(data)
except (SyntaxError, IndentationError) as e:
raise AnsibleError("Unable to import %s due to %s" % (name, e.msg))
finder = ModuleDepFinder()
finder.visit(tree)
#
    # Determine which of the imports we've found are modules (vs. class, function,
    # or variable names) for packages
#
normalized_modules = set()
# Loop through the imports that we've found to normalize them
# Exclude paths that match with paths we've already processed
# (Have to exclude them a second time once the paths are processed)
module_utils_paths = [p for p in module_utils_loader._get_paths(subdirs=False) if os.path.isdir(p)]
module_utils_paths.append(_MODULE_UTILS_PATH)
for py_module_name in finder.submodules.difference(py_module_names):
module_info = None
if py_module_name[0] == 'six':
# Special case the python six library because it messes up the
# import process in an incompatible way
module_info = ModuleInfo('six', module_utils_paths)
py_module_name = ('six',)
idx = 0
elif py_module_name[0] == '_six':
# Special case the python six library because it messes up the
# import process in an incompatible way
module_info = ModuleInfo('_six', [os.path.join(p, 'six') for p in module_utils_paths])
py_module_name = ('six', '_six')
idx = 0
elif py_module_name[0] == 'ansible_collections':
# FIXME: replicate module name resolution like below for granular imports
# this is a collection-hosted MU; look it up with get_data
package_name = '.'.join(py_module_name[:-1])
resource_name = py_module_name[-1] + '.py'
try:
# FIXME: need this in py2 for some reason TBD, but we shouldn't (get_data delegates to wrong loader without it)
pkg = import_module(package_name)
module_info = pkgutil.get_data(package_name, resource_name)
except FileNotFoundError:
# FIXME: implement package fallback code
raise AnsibleError('unable to load collection-hosted module_util {0}.{1}'.format(to_native(package_name),
to_native(resource_name)))
idx = 0
else:
# Check whether either the last or the second to last identifier is
# a module name
for idx in (1, 2):
if len(py_module_name) < idx:
break
try:
module_info = ModuleInfo(py_module_name[-idx],
[os.path.join(p, *py_module_name[:-idx]) for p in module_utils_paths])
break
except ImportError:
continue
# Could not find the module. Construct a helpful error message.
if module_info is None:
msg = ['Could not find imported module support code for %s. Looked for' % (name,)]
if idx == 2:
msg.append('either %s.py or %s.py' % (py_module_name[-1], py_module_name[-2]))
else:
msg.append(py_module_name[-1])
raise AnsibleError(' '.join(msg))
if isinstance(module_info, bytes): # collection-hosted, just the code
# HACK: maybe surface collection dirs in here and use existing find_module code?
normalized_name = py_module_name
normalized_data = module_info
normalized_path = os.path.join(*py_module_name)
py_module_cache[normalized_name] = (normalized_data, normalized_path)
normalized_modules.add(normalized_name)
# HACK: walk back up the package hierarchy to pick up package inits; this won't do the right thing
# for actual packages yet...
accumulated_pkg_name = []
for pkg in py_module_name[:-1]:
accumulated_pkg_name.append(pkg) # we're accumulating this across iterations
normalized_name = tuple(accumulated_pkg_name[:] + ['__init__']) # extra machinations to get a hashable type (list is not)
if normalized_name not in py_module_cache:
normalized_path = os.path.join(*accumulated_pkg_name)
# HACK: possibly preserve some of the actual package file contents; problematic for extend_paths and others though?
normalized_data = ''
py_module_cache[normalized_name] = (normalized_data, normalized_path)
normalized_modules.add(normalized_name)
else:
# Found a byte compiled file rather than source. We cannot send byte
# compiled over the wire as the python version might be different.
# imp.find_module seems to prefer to return source packages so we just
# error out if imp.find_module returns byte compiled files (This is
# fragile as it depends on undocumented imp.find_module behaviour)
if not module_info.pkg_dir and not module_info.py_src:
msg = ['Could not find python source for imported module support code for %s. Looked for' % name]
if idx == 2:
msg.append('either %s.py or %s.py' % (py_module_name[-1], py_module_name[-2]))
else:
msg.append(py_module_name[-1])
raise AnsibleError(' '.join(msg))
if idx == 2:
# We've determined that the last portion was an identifier and
# thus, not part of the module name
py_module_name = py_module_name[:-1]
# If not already processed then we've got work to do
# If not in the cache, then read the file into the cache
# We already have a file handle for the module open so it makes
# sense to read it now
if py_module_name not in py_module_cache:
if module_info.pkg_dir:
# Read the __init__.py instead of the module file as this is
# a python package
normalized_name = py_module_name + ('__init__',)
if normalized_name not in py_module_names:
normalized_data = module_info.get_source()
py_module_cache[normalized_name] = (normalized_data, module_info.path)
normalized_modules.add(normalized_name)
else:
normalized_name = py_module_name
if normalized_name not in py_module_names:
normalized_data = module_info.get_source()
py_module_cache[normalized_name] = (normalized_data, module_info.path)
normalized_modules.add(normalized_name)
# Make sure that all the packages that this module is a part of
# are also added
for i in range(1, len(py_module_name)):
py_pkg_name = py_module_name[:-i] + ('__init__',)
if py_pkg_name not in py_module_names:
pkg_dir_info = ModuleInfo(py_pkg_name[-1],
[os.path.join(p, *py_pkg_name[:-1]) for p in module_utils_paths])
normalized_modules.add(py_pkg_name)
py_module_cache[py_pkg_name] = (pkg_dir_info.get_source(), pkg_dir_info.path)
# FIXME: Currently the AnsiBallZ wrapper monkeypatches module args into a global
# variable in basic.py. If a module doesn't import basic.py, then the AnsiBallZ wrapper will
    # traceback when it tries to monkeypatch. So, for now, we have to unconditionally include
# basic.py.
#
# In the future we need to change the wrapper to monkeypatch the args into a global variable in
# their own, separate python module. That way we won't require basic.py. Modules which don't
# want basic.py can import that instead. AnsibleModule will need to change to import the vars
# from the separate python module and mirror the args into its global variable for backwards
# compatibility.
if ('basic',) not in py_module_names:
pkg_dir_info = ModuleInfo('basic', module_utils_paths)
normalized_modules.add(('basic',))
py_module_cache[('basic',)] = (pkg_dir_info.get_source(), pkg_dir_info.path)
# End of AnsiballZ hack
#
# iterate through all of the ansible.module_utils* imports that we haven't
# already checked for new imports
#
# set of modules that we haven't added to the zipfile
unprocessed_py_module_names = normalized_modules.difference(py_module_names)
for py_module_name in unprocessed_py_module_names:
# HACK: this seems to work as a way to identify a collections-based import, but a stronger identifier would be better
if not py_module_cache[py_module_name][1].startswith('/'):
dir_prefix = ''
else:
dir_prefix = 'ansible/module_utils'
py_module_path = os.path.join(*py_module_name)
py_module_file_name = '%s.py' % py_module_path
zf.writestr(os.path.join(dir_prefix,
py_module_file_name), py_module_cache[py_module_name][0])
display.vvvvv("Using module_utils file %s" % py_module_cache[py_module_name][1])
# Add the names of the files we're scheduling to examine in the loop to
# py_module_names so that we don't re-examine them in the next pass
# through recursive_finder()
py_module_names.update(unprocessed_py_module_names)
for py_module_file in unprocessed_py_module_names:
recursive_finder(py_module_file, py_module_cache[py_module_file][0], py_module_names, py_module_cache, zf)
# Save memory; the file won't have to be read again for this ansible module.
del py_module_cache[py_module_file]
def _is_binary(b_module_data):
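    # Heuristic: the module is considered binary if its first 1024 bytes contain
    # control characters other than BEL/BS/TAB/LF/FF/CR/ESC (bytes >= 0x80 are
    # still treated as text so UTF-8 sources pass).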
textchars = bytearray(set([7, 8, 9, 10, 12, 13, 27]) | set(range(0x20, 0x100)) - set([0x7f]))
start = b_module_data[:1024]
return bool(start.translate(None, textchars))
def _find_module_utils(module_name, b_module_data, module_path, module_args, task_vars, templar, module_compression, async_timeout, become,
become_method, become_user, become_password, become_flags, environment):
"""
Given the source of the module, convert it to a Jinja2 template to insert
module code and return whether it's a new or old style module.
"""
module_substyle = module_style = 'old'
# module_style is something important to calling code (ActionBase). It
# determines how arguments are formatted (json vs k=v) and whether
# a separate arguments file needs to be sent over the wire.
# module_substyle is extra information that's useful internally. It tells
# us what we have to look to substitute in the module files and whether
# we're using module replacer or ansiballz to format the module itself.
if _is_binary(b_module_data):
module_substyle = module_style = 'binary'
elif REPLACER in b_module_data:
# Do REPLACER before from ansible.module_utils because we need make sure
# we substitute "from ansible.module_utils basic" for REPLACER
module_style = 'new'
module_substyle = 'python'
b_module_data = b_module_data.replace(REPLACER, b'from ansible.module_utils.basic import *')
# FUTURE: combined regex for this stuff, or a "looks like Python, let's inspect further" mechanism
elif b'from ansible.module_utils.' in b_module_data or b'from ansible_collections.' in b_module_data\
or b'import ansible_collections.' in b_module_data:
module_style = 'new'
module_substyle = 'python'
elif REPLACER_WINDOWS in b_module_data:
module_style = 'new'
module_substyle = 'powershell'
b_module_data = b_module_data.replace(REPLACER_WINDOWS, b'#Requires -Module Ansible.ModuleUtils.Legacy')
elif re.search(b'#Requires -Module', b_module_data, re.IGNORECASE) \
or re.search(b'#Requires -Version', b_module_data, re.IGNORECASE)\
or re.search(b'#AnsibleRequires -OSVersion', b_module_data, re.IGNORECASE) \
or re.search(b'#AnsibleRequires -Powershell', b_module_data, re.IGNORECASE) \
or re.search(b'#AnsibleRequires -CSharpUtil', b_module_data, re.IGNORECASE):
module_style = 'new'
module_substyle = 'powershell'
elif REPLACER_JSONARGS in b_module_data:
module_style = 'new'
module_substyle = 'jsonargs'
elif b'WANT_JSON' in b_module_data:
module_substyle = module_style = 'non_native_want_json'
shebang = None
# Neither old-style, non_native_want_json nor binary modules should be modified
# except for the shebang line (Done by modify_module)
if module_style in ('old', 'non_native_want_json', 'binary'):
return b_module_data, module_style, shebang
output = BytesIO()
py_module_names = set()
if module_substyle == 'python':
params = dict(ANSIBLE_MODULE_ARGS=module_args,)
try:
python_repred_params = repr(json.dumps(params))
except TypeError as e:
raise AnsibleError("Unable to pass options to module, they must be JSON serializable: %s" % to_native(e))
try:
compression_method = getattr(zipfile, module_compression)
except AttributeError:
display.warning(u'Bad module compression string specified: %s. Using ZIP_STORED (no compression)' % module_compression)
compression_method = zipfile.ZIP_STORED
lookup_path = os.path.join(C.DEFAULT_LOCAL_TMP, 'ansiballz_cache')
cached_module_filename = os.path.join(lookup_path, "%s-%s" % (module_name, module_compression))
zipdata = None
# Optimization -- don't lock if the module has already been cached
if os.path.exists(cached_module_filename):
display.debug('ANSIBALLZ: using cached module: %s' % cached_module_filename)
with open(cached_module_filename, 'rb') as module_data:
zipdata = module_data.read()
else:
if module_name in action_write_locks.action_write_locks:
display.debug('ANSIBALLZ: Using lock for %s' % module_name)
lock = action_write_locks.action_write_locks[module_name]
else:
# If the action plugin directly invokes the module (instead of
# going through a strategy) then we don't have a cross-process
# Lock specifically for this module. Use the "unexpected
# module" lock instead
display.debug('ANSIBALLZ: Using generic lock for %s' % module_name)
lock = action_write_locks.action_write_locks[None]
display.debug('ANSIBALLZ: Acquiring lock')
with lock:
display.debug('ANSIBALLZ: Lock acquired: %s' % id(lock))
# Check that no other process has created this while we were
# waiting for the lock
if not os.path.exists(cached_module_filename):
display.debug('ANSIBALLZ: Creating module')
# Create the module zip data
zipoutput = BytesIO()
zf = zipfile.ZipFile(zipoutput, mode='w', compression=compression_method)
# Note: If we need to import from release.py first,
# remember to catch all exceptions: https://github.com/ansible/ansible/issues/16523
zf.writestr('ansible/__init__.py',
b'from pkgutil import extend_path\n__path__=extend_path(__path__,__name__)\n__version__="' +
to_bytes(__version__) + b'"\n__author__="' +
to_bytes(__author__) + b'"\n')
zf.writestr('ansible/module_utils/__init__.py', b'from pkgutil import extend_path\n__path__=extend_path(__path__,__name__)\n')
zf.writestr('__main__.py', b_module_data)
py_module_cache = {('__init__',): (b'', '[builtin]')}
recursive_finder(module_name, b_module_data, py_module_names, py_module_cache, zf)
zf.close()
zipdata = base64.b64encode(zipoutput.getvalue())
# Write the assembled module to a temp file (write to temp
# so that no one looking for the file reads a partially
# written file)
if not os.path.exists(lookup_path):
# Note -- if we have a global function to setup, that would
# be a better place to run this
os.makedirs(lookup_path)
display.debug('ANSIBALLZ: Writing module')
with open(cached_module_filename + '-part', 'wb') as f:
f.write(zipdata)
# Rename the file into its final position in the cache so
# future users of this module can read it off the
# filesystem instead of constructing from scratch.
display.debug('ANSIBALLZ: Renaming module')
os.rename(cached_module_filename + '-part', cached_module_filename)
display.debug('ANSIBALLZ: Done creating module')
if zipdata is None:
display.debug('ANSIBALLZ: Reading module after lock')
# Another process wrote the file while we were waiting for
# the write lock. Go ahead and read the data from disk
# instead of re-creating it.
try:
with open(cached_module_filename, 'rb') as f:
zipdata = f.read()
except IOError:
raise AnsibleError('A different worker process failed to create module file. '
'Look at traceback for that process for debugging information.')
zipdata = to_text(zipdata, errors='surrogate_or_strict')
shebang, interpreter = _get_shebang(u'/usr/bin/python', task_vars, templar)
if shebang is None:
shebang = u'#!/usr/bin/python'
# Enclose the parts of the interpreter in quotes because we're
# substituting it into the template as a Python string
interpreter_parts = interpreter.split(u' ')
interpreter = u"'{0}'".format(u"', '".join(interpreter_parts))
# FUTURE: the module cache entry should be invalidated if we got this value from a host-dependent source
rlimit_nofile = C.config.get_config_value('PYTHON_MODULE_RLIMIT_NOFILE', variables=task_vars)
if not isinstance(rlimit_nofile, int):
rlimit_nofile = int(templar.template(rlimit_nofile))
if rlimit_nofile:
rlimit = ANSIBALLZ_RLIMIT_TEMPLATE % dict(
rlimit_nofile=rlimit_nofile,
)
else:
rlimit = ''
coverage_config = os.environ.get('_ANSIBLE_COVERAGE_CONFIG')
if coverage_config:
coverage_output = os.environ['_ANSIBLE_COVERAGE_OUTPUT']
if coverage_output:
# Enable code coverage analysis of the module.
# This feature is for internal testing and may change without notice.
coverage = ANSIBALLZ_COVERAGE_TEMPLATE % dict(
coverage_config=coverage_config,
coverage_output=coverage_output,
)
else:
# Verify coverage is available without importing it.
# This will detect when a module would fail with coverage enabled with minimal overhead.
coverage = ANSIBALLZ_COVERAGE_CHECK_TEMPLATE
else:
coverage = ''
now = datetime.datetime.utcnow()
output.write(to_bytes(ACTIVE_ANSIBALLZ_TEMPLATE % dict(
zipdata=zipdata,
ansible_module=module_name,
params=python_repred_params,
shebang=shebang,
interpreter=interpreter,
coding=ENCODING_STRING,
year=now.year,
month=now.month,
day=now.day,
hour=now.hour,
minute=now.minute,
second=now.second,
coverage=coverage,
rlimit=rlimit,
)))
b_module_data = output.getvalue()
elif module_substyle == 'powershell':
# Powershell/winrm don't actually make use of shebang so we can
# safely set this here. If we let the fallback code handle this
# it can fail in the presence of the UTF8 BOM commonly added by
# Windows text editors
shebang = u'#!powershell'
# create the common exec wrapper payload and set that as the module_data
# bytes
b_module_data = ps_manifest._create_powershell_wrapper(
b_module_data, module_args, environment, async_timeout, become,
become_method, become_user, become_password, become_flags,
module_substyle
)
elif module_substyle == 'jsonargs':
module_args_json = to_bytes(json.dumps(module_args))
# these strings could be included in a third-party module but
# officially they were included in the 'basic' snippet for new-style
# python modules (which has been replaced with something else in
# ansiballz) If we remove them from jsonargs-style module replacer
# then we can remove them everywhere.
python_repred_args = to_bytes(repr(module_args_json))
b_module_data = b_module_data.replace(REPLACER_VERSION, to_bytes(repr(__version__)))
b_module_data = b_module_data.replace(REPLACER_COMPLEX, python_repred_args)
b_module_data = b_module_data.replace(REPLACER_SELINUX, to_bytes(','.join(C.DEFAULT_SELINUX_SPECIAL_FS)))
# The main event -- substitute the JSON args string into the module
b_module_data = b_module_data.replace(REPLACER_JSONARGS, module_args_json)
facility = b'syslog.' + to_bytes(task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY), errors='surrogate_or_strict')
b_module_data = b_module_data.replace(b'syslog.LOG_USER', facility)
return (b_module_data, module_style, shebang)
def modify_module(module_name, module_path, module_args, templar, task_vars=None, module_compression='ZIP_STORED', async_timeout=0, become=False,
become_method=None, become_user=None, become_password=None, become_flags=None, environment=None):
"""
Used to insert chunks of code into modules before transfer rather than
doing regular python imports. This allows for more efficient transfer in
a non-bootstrapping scenario by not moving extra files over the wire and
also takes care of embedding arguments in the transferred modules.
This version is done in such a way that local imports can still be
used in the module code, so IDEs don't have to be aware of what is going on.
Example:
from ansible.module_utils.basic import *
... will result in the insertion of basic.py into the module
from the module_utils/ directory in the source tree.
For powershell, this code effectively no-ops, as the exec wrapper requires access to a number of
properties not available here.
"""
task_vars = {} if task_vars is None else task_vars
environment = {} if environment is None else environment
with open(module_path, 'rb') as f:
# read in the module source
b_module_data = f.read()
(b_module_data, module_style, shebang) = _find_module_utils(module_name, b_module_data, module_path, module_args, task_vars, templar, module_compression,
async_timeout=async_timeout, become=become, become_method=become_method,
become_user=become_user, become_password=become_password, become_flags=become_flags,
environment=environment)
if module_style == 'binary':
return (b_module_data, module_style, to_text(shebang, nonstring='passthru'))
elif shebang is None:
b_lines = b_module_data.split(b"\n", 1)
if b_lines[0].startswith(b"#!"):
b_shebang = b_lines[0].strip()
# shlex.split on python-2.6 needs bytes. On python-3.x it needs text
args = shlex.split(to_native(b_shebang[2:], errors='surrogate_or_strict'))
# _get_shebang() takes text strings
args = [to_text(a, errors='surrogate_or_strict') for a in args]
interpreter = args[0]
b_new_shebang = to_bytes(_get_shebang(interpreter, task_vars, templar, args[1:])[0],
errors='surrogate_or_strict', nonstring='passthru')
if b_new_shebang:
b_lines[0] = b_shebang = b_new_shebang
if os.path.basename(interpreter).startswith(u'python'):
b_lines.insert(1, b_ENCODING_STRING)
shebang = to_text(b_shebang, nonstring='passthru', errors='surrogate_or_strict')
else:
# No shebang, assume a binary module?
pass
b_module_data = b"\n".join(b_lines)
return (b_module_data, module_style, shebang)
| gpl-3.0 | -5,245,933,034,511,005,000 | 44.937726 | 157 | 0.616044 | false |
Mikkeli222/Nipatus | Nipatus/screen_previous.py | 1 | 2350 | """
screen_previous.py - This module contains class PreviousScreen.
Copyright © 2015 Mikko Pitkänen mun.lumia900@gmail.com
"""
import tkinter
import tkinter.ttk
import data as Data
import misc_functions
class PreviousScreen(tkinter.Frame):
def __init__(self, master, fn, arg):
tkinter.Frame.__init__(self)
self.signal_obj = master
self.signal_func = fn
self.signal_arg = arg
self.savedata = {}
saves = misc_functions.read_save()
for save in saves:
a_data = Data.Data(False, save)
datetime = a_data.time
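            # a_data.time is a "YYYYMMDDhhmmss" timestamp string; slice it into
            # a display date (dd.mm.yyyy) and time (hh.mm.ss)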
date = datetime[6:8] + "." + datetime[4:6] + "." + datetime[:4]
time = (datetime[8:10] + "." + datetime[10:12] +
"." + datetime[12:14])
if date not in self.savedata:
self.savedata[date] = {time: a_data}
elif time not in self.savedata[date]:
self.savedata[date][time] = a_data
else:
while time in self.savedata[date]:
time += " "
self.savedata[date][time] = a_data
self.svar_date = tkinter.StringVar(self)
self.cb_date = tkinter.ttk.Combobox(self, state="readonly", textvariable=self.svar_date, width=10)
self.cb_date.bind("<<ComboboxSelected>>", self.set_times)
self.cb_date.grid(row=0, column=0)
self.svar_time = tkinter.StringVar(self)
self.cb_time = tkinter.ttk.Combobox(self, state="readonly", textvariable=self.svar_time, width=10)
self.cb_time.bind("<<ComboboxSelected>>", self.signal)
self.cb_time.grid(row=0, column=1)
if "" != "":
self.set_times("<<ComboboxSelected>>")
self.set_dates()
def set_dates(self):
dates = []
for date in self.savedata:
dates.append(date)
self.cb_date.config(values=dates)
def set_times(self, event):
date = self.svar_date.get()
times = []
for time in sorted(self.savedata[date]):
times.append(time)
self.cb_time.config(values=times)
def signal(self, event):
self.signal_func(self.signal_obj, self.signal_arg)
def exit(self):
date = self.svar_date.get()
time = self.svar_time.get()
return self.savedata[date][time]
| apache-2.0 | -5,841,486,286,677,880,000 | 31.583333 | 106 | 0.568627 | false |
AMOboxTV/AMOBox.LegoBuild | script.module.pydevd/lib/pydev_ipython/inputhookglut.py | 2 | 5595 | # coding: utf-8
"""
GLUT Inputhook support functions
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
# GLUT is quite an old library and it is difficult to ensure proper
# integration within IPython since original GLUT does not allow to handle
# events one by one. Instead, it requires for the mainloop to be entered
# and never returned (there is not even a function to exit he
# mainloop). Fortunately, there are alternatives such as freeglut
# (available for linux and windows) and the OSX implementation gives
# access to a glutCheckLoop() function that blocks itself until a new
# event is received. This means we have to set up the idle callback to
# ensure we get at least one event that will unblock the function.
#
# Furthermore, it is not possible to install these handlers without a window
# being first created. We choose to make this window invisible. This means that
# display mode options are set at this level and the user won't be able to change
# them later without modifying the code. This should probably be made available
# via IPython options system.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
import sys
import time
import signal
import OpenGL.GLUT as glut
import OpenGL.platform as platform
from timeit import default_timer as clock
from pydev_ipython.inputhook import stdin_ready
#-----------------------------------------------------------------------------
# Constants
#-----------------------------------------------------------------------------
# Frame per second : 60
# Should probably be an IPython option
glut_fps = 60
# Display mode : double buffered + rgba + depth
# Should probably be an IPython option
glut_display_mode = (glut.GLUT_DOUBLE |
glut.GLUT_RGBA |
glut.GLUT_DEPTH)
glutMainLoopEvent = None
if sys.platform == 'darwin':
try:
glutCheckLoop = platform.createBaseFunction(
'glutCheckLoop', dll=platform.GLUT, resultType=None,
argTypes=[],
doc='glutCheckLoop( ) -> None',
argNames=(),
)
except AttributeError:
raise RuntimeError(
'''Your glut implementation does not allow interactive sessions'''
'''Consider installing freeglut.''')
glutMainLoopEvent = glutCheckLoop
elif glut.HAVE_FREEGLUT:
glutMainLoopEvent = glut.glutMainLoopEvent
else:
raise RuntimeError(
'''Your glut implementation does not allow interactive sessions. '''
'''Consider installing freeglut.''')
#-----------------------------------------------------------------------------
# Callback functions
#-----------------------------------------------------------------------------
def glut_display():
# Dummy display function
pass
def glut_idle():
# Dummy idle function
pass
def glut_close():
# Close function only hides the current window
glut.glutHideWindow()
glutMainLoopEvent()
def glut_int_handler(signum, frame):
    # Catch sigint and print the default message
signal.signal(signal.SIGINT, signal.default_int_handler)
print '\nKeyboardInterrupt'
# Need to reprint the prompt at this stage
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def inputhook_glut():
"""Run the pyglet event loop by processing pending events only.
This keeps processing pending events until stdin is ready. After
processing all pending events, a call to time.sleep is inserted. This is
needed, otherwise, CPU usage is at 100%. This sleep time should be tuned
though for best performance.
"""
# We need to protect against a user pressing Control-C when IPython is
# idle and this is running. We trap KeyboardInterrupt and pass.
signal.signal(signal.SIGINT, glut_int_handler)
try:
t = clock()
# Make sure the default window is set after a window has been closed
if glut.glutGetWindow() == 0:
glut.glutSetWindow( 1 )
glutMainLoopEvent()
return 0
while not stdin_ready():
glutMainLoopEvent()
# We need to sleep at this point to keep the idle CPU load
# low. However, if sleep to long, GUI response is poor. As
# a compromise, we watch how often GUI events are being processed
# and switch between a short and long sleep time. Here are some
# stats useful in helping to tune this.
# time CPU load
# 0.001 13%
# 0.005 3%
# 0.01 1.5%
# 0.05 0.5%
used_time = clock() - t
if used_time > 10.0:
# print 'Sleep for 1 s' # dbg
time.sleep(1.0)
elif used_time > 0.1:
# Few GUI events coming in, so we can sleep longer
# print 'Sleep for 0.05 s' # dbg
time.sleep(0.05)
else:
# Many GUI events coming in, so sleep only very little
time.sleep(0.001)
except KeyboardInterrupt:
pass
return 0
| gpl-2.0 | 6,698,461,270,886,765,000 | 35.568627 | 79 | 0.563181 | false |
emile2016/flaskbb | flaskbb/utils/markup.py | 2 | 2496 | import os
import re
from flask import url_for
import mistune
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter
_re_emoji = re.compile(r':([a-z0-9\+\-_]+):', re.I)
_re_user = re.compile(r'@(\w+)', re.I)
def collect_emojis():
"""Returns a dictionary containing all emojis with their
    name and filename. If the folder doesn't exist it returns an empty
dictionary.
"""
emojis = dict()
full_path = os.path.join(os.path.abspath("flaskbb"), "static", "emoji")
# return an empty dictionary if the path doesn't exist
if not os.path.exists(full_path):
return emojis
for emoji in os.listdir(full_path):
name, ending = emoji.split(".")
if ending in ["png", "gif", "jpg", "jpeg"]:
emojis[name] = emoji
return emojis
EMOJIS = collect_emojis()
class FlaskBBRenderer(mistune.Renderer):
"""Markdown with some syntetic sugar such as @user gets linked to the
user's profile and emoji support.
"""
def __init__(self, **kwargs):
super(FlaskBBRenderer, self).__init__(**kwargs)
def paragraph(self, text):
"""Rendering paragraph tags. Like ``<p>`` with emoji support."""
def emojify(match):
value = match.group(1)
if value in EMOJIS:
filename = url_for(
"static",
filename="emoji/{}".format(EMOJIS[value])
)
emoji = "<img class='{css}' alt='{alt}' src='{src}' />".format(
css="emoji", alt=value,
src=filename
)
return emoji
return match.group(0)
def userify(match):
value = match.group(1)
user = "<a href='{url}'>@{user}</a>".format(
url=url_for("user.profile", username=value, _external=False),
user=value
)
return user
text = _re_emoji.sub(emojify, text)
text = _re_user.sub(userify, text)
return '<p>%s</p>\n' % text.strip(' ')
def block_code(self, code, lang):
if not lang:
return '\n<pre><code>%s</code></pre>\n' % \
mistune.escape(code)
lexer = get_lexer_by_name(lang, stripall=True)
formatter = HtmlFormatter()
return highlight(code, lexer, formatter)
renderer = FlaskBBRenderer()
markdown = mistune.Markdown(renderer=renderer)
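# Illustrative usage (assumed, not part of the original file): the `markdown`
# callable renders post text to HTML, e.g.
#   html = markdown("Hello @admin :smile:")
# which wraps the text in <p> tags, links the @admin mention to its profile
# page and swaps :smile: for an <img> tag when a matching emoji file exists.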
| bsd-3-clause | -4,756,548,123,256,392,000 | 28.023256 | 79 | 0.563301 | false |
alexforencich/xfcp | lib/eth/tb/test_ip_eth_rx_64.py | 2 | 37694 | #!/usr/bin/env python
"""
Copyright (c) 2014-2018 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import eth_ep
import ip_ep
module = 'ip_eth_rx_64'
testbench = 'test_%s' % module
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("%s.v" % testbench)
src = ' '.join(srcs)
build_cmd = "iverilog -o %s.vvp %s" % (testbench, src)
def bench():
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
s_eth_hdr_valid = Signal(bool(0))
s_eth_dest_mac = Signal(intbv(0)[48:])
s_eth_src_mac = Signal(intbv(0)[48:])
s_eth_type = Signal(intbv(0)[16:])
s_eth_payload_axis_tdata = Signal(intbv(0)[64:])
s_eth_payload_axis_tkeep = Signal(intbv(0)[8:])
s_eth_payload_axis_tvalid = Signal(bool(0))
s_eth_payload_axis_tlast = Signal(bool(0))
s_eth_payload_axis_tuser = Signal(bool(0))
m_ip_hdr_ready = Signal(bool(0))
m_ip_payload_axis_tready = Signal(bool(0))
# Outputs
s_eth_hdr_ready = Signal(bool(0))
s_eth_payload_axis_tready = Signal(bool(0))
m_ip_hdr_valid = Signal(bool(0))
m_eth_dest_mac = Signal(intbv(0)[48:])
m_eth_src_mac = Signal(intbv(0)[48:])
m_eth_type = Signal(intbv(0)[16:])
m_ip_version = Signal(intbv(0)[4:])
m_ip_ihl = Signal(intbv(0)[4:])
m_ip_dscp = Signal(intbv(0)[6:])
m_ip_ecn = Signal(intbv(0)[2:])
m_ip_length = Signal(intbv(0)[16:])
m_ip_identification = Signal(intbv(0)[16:])
m_ip_flags = Signal(intbv(0)[3:])
m_ip_fragment_offset = Signal(intbv(0)[13:])
m_ip_ttl = Signal(intbv(0)[8:])
m_ip_protocol = Signal(intbv(0)[8:])
m_ip_header_checksum = Signal(intbv(0)[16:])
m_ip_source_ip = Signal(intbv(0)[32:])
m_ip_dest_ip = Signal(intbv(0)[32:])
m_ip_payload_axis_tdata = Signal(intbv(0)[64:])
m_ip_payload_axis_tkeep = Signal(intbv(0)[8:])
m_ip_payload_axis_tvalid = Signal(bool(0))
m_ip_payload_axis_tlast = Signal(bool(0))
m_ip_payload_axis_tuser = Signal(bool(0))
busy = Signal(bool(0))
error_header_early_termination = Signal(bool(0))
error_payload_early_termination = Signal(bool(0))
error_invalid_header = Signal(bool(0))
error_invalid_checksum = Signal(bool(0))
# sources and sinks
source_pause = Signal(bool(0))
sink_pause = Signal(bool(0))
source = eth_ep.EthFrameSource()
source_logic = source.create_logic(
clk,
rst,
eth_hdr_ready=s_eth_hdr_ready,
eth_hdr_valid=s_eth_hdr_valid,
eth_dest_mac=s_eth_dest_mac,
eth_src_mac=s_eth_src_mac,
eth_type=s_eth_type,
eth_payload_tdata=s_eth_payload_axis_tdata,
eth_payload_tkeep=s_eth_payload_axis_tkeep,
eth_payload_tvalid=s_eth_payload_axis_tvalid,
eth_payload_tready=s_eth_payload_axis_tready,
eth_payload_tlast=s_eth_payload_axis_tlast,
eth_payload_tuser=s_eth_payload_axis_tuser,
pause=source_pause,
name='source'
)
sink = ip_ep.IPFrameSink()
sink_logic = sink.create_logic(
clk,
rst,
ip_hdr_ready=m_ip_hdr_ready,
ip_hdr_valid=m_ip_hdr_valid,
eth_dest_mac=m_eth_dest_mac,
eth_src_mac=m_eth_src_mac,
eth_type=m_eth_type,
ip_version=m_ip_version,
ip_ihl=m_ip_ihl,
ip_dscp=m_ip_dscp,
ip_ecn=m_ip_ecn,
ip_length=m_ip_length,
ip_identification=m_ip_identification,
ip_flags=m_ip_flags,
ip_fragment_offset=m_ip_fragment_offset,
ip_ttl=m_ip_ttl,
ip_protocol=m_ip_protocol,
ip_header_checksum=m_ip_header_checksum,
ip_source_ip=m_ip_source_ip,
ip_dest_ip=m_ip_dest_ip,
ip_payload_tdata=m_ip_payload_axis_tdata,
ip_payload_tkeep=m_ip_payload_axis_tkeep,
ip_payload_tvalid=m_ip_payload_axis_tvalid,
ip_payload_tready=m_ip_payload_axis_tready,
ip_payload_tlast=m_ip_payload_axis_tlast,
ip_payload_tuser=m_ip_payload_axis_tuser,
pause=sink_pause,
name='sink'
)
# DUT
if os.system(build_cmd):
raise Exception("Error running build command")
dut = Cosimulation(
"vvp -m myhdl %s.vvp -lxt2" % testbench,
clk=clk,
rst=rst,
current_test=current_test,
s_eth_hdr_valid=s_eth_hdr_valid,
s_eth_hdr_ready=s_eth_hdr_ready,
s_eth_dest_mac=s_eth_dest_mac,
s_eth_src_mac=s_eth_src_mac,
s_eth_type=s_eth_type,
s_eth_payload_axis_tdata=s_eth_payload_axis_tdata,
s_eth_payload_axis_tkeep=s_eth_payload_axis_tkeep,
s_eth_payload_axis_tvalid=s_eth_payload_axis_tvalid,
s_eth_payload_axis_tready=s_eth_payload_axis_tready,
s_eth_payload_axis_tlast=s_eth_payload_axis_tlast,
s_eth_payload_axis_tuser=s_eth_payload_axis_tuser,
m_ip_hdr_valid=m_ip_hdr_valid,
m_ip_hdr_ready=m_ip_hdr_ready,
m_eth_dest_mac=m_eth_dest_mac,
m_eth_src_mac=m_eth_src_mac,
m_eth_type=m_eth_type,
m_ip_version=m_ip_version,
m_ip_ihl=m_ip_ihl,
m_ip_dscp=m_ip_dscp,
m_ip_ecn=m_ip_ecn,
m_ip_length=m_ip_length,
m_ip_identification=m_ip_identification,
m_ip_flags=m_ip_flags,
m_ip_fragment_offset=m_ip_fragment_offset,
m_ip_ttl=m_ip_ttl,
m_ip_protocol=m_ip_protocol,
m_ip_header_checksum=m_ip_header_checksum,
m_ip_source_ip=m_ip_source_ip,
m_ip_dest_ip=m_ip_dest_ip,
m_ip_payload_axis_tdata=m_ip_payload_axis_tdata,
m_ip_payload_axis_tkeep=m_ip_payload_axis_tkeep,
m_ip_payload_axis_tvalid=m_ip_payload_axis_tvalid,
m_ip_payload_axis_tready=m_ip_payload_axis_tready,
m_ip_payload_axis_tlast=m_ip_payload_axis_tlast,
m_ip_payload_axis_tuser=m_ip_payload_axis_tuser,
busy=busy,
error_header_early_termination=error_header_early_termination,
error_payload_early_termination=error_payload_early_termination,
error_invalid_header=error_invalid_header,
error_invalid_checksum=error_invalid_checksum
)
@always(delay(4))
def clkgen():
clk.next = not clk
error_header_early_termination_asserted = Signal(bool(0))
error_payload_early_termination_asserted = Signal(bool(0))
error_invalid_header_asserted = Signal(bool(0))
error_invalid_checksum_asserted = Signal(bool(0))
@always(clk.posedge)
def monitor():
if (error_header_early_termination):
error_header_early_termination_asserted.next = 1
if (error_payload_early_termination):
error_payload_early_termination_asserted.next = 1
if (error_invalid_header):
error_invalid_header_asserted.next = 1
if (error_invalid_checksum):
error_invalid_checksum_asserted.next = 1
def wait_normal():
while s_eth_payload_axis_tvalid or m_ip_payload_axis_tvalid or s_eth_hdr_valid:
yield clk.posedge
def wait_pause_source():
while s_eth_payload_axis_tvalid or m_ip_payload_axis_tvalid or s_eth_hdr_valid:
yield clk.posedge
yield clk.posedge
source_pause.next = False
yield clk.posedge
source_pause.next = True
yield clk.posedge
source_pause.next = False
def wait_pause_sink():
while s_eth_payload_axis_tvalid or m_ip_payload_axis_tvalid or s_eth_hdr_valid:
sink_pause.next = True
yield clk.posedge
yield clk.posedge
yield clk.posedge
sink_pause.next = False
yield clk.posedge
@instance
def check():
yield delay(100)
yield clk.posedge
rst.next = 1
yield clk.posedge
rst.next = 0
yield clk.posedge
yield delay(100)
yield clk.posedge
for payload_len in range(1,18):
yield clk.posedge
print("test 1: test packet, length %d" % payload_len)
current_test.next = 1
test_frame = ip_ep.IPFrame()
test_frame.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame.eth_src_mac = 0x5A5152535455
test_frame.eth_type = 0x0800
test_frame.ip_version = 4
test_frame.ip_ihl = 5
test_frame.ip_length = None
test_frame.ip_identification = 0
test_frame.ip_flags = 2
test_frame.ip_fragment_offset = 0
test_frame.ip_ttl = 64
test_frame.ip_protocol = 0x11
test_frame.ip_header_checksum = None
test_frame.ip_source_ip = 0xc0a80164
test_frame.ip_dest_ip = 0xc0a80165
test_frame.payload = bytearray(range(payload_len))
test_frame.build()
eth_frame = test_frame.build_eth()
for wait in wait_normal, wait_pause_source, wait_pause_sink:
source.send(eth_frame)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 2: back-to-back packets, length %d" % payload_len)
current_test.next = 2
test_frame1 = ip_ep.IPFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x0800
test_frame1.ip_version = 4
test_frame1.ip_ihl = 5
test_frame1.ip_length = None
test_frame1.ip_identification = 0
test_frame1.ip_flags = 2
test_frame1.ip_fragment_offset = 0
test_frame1.ip_ttl = 64
test_frame1.ip_protocol = 0x11
test_frame1.ip_header_checksum = None
test_frame1.ip_source_ip = 0xc0a80164
test_frame1.ip_dest_ip = 0xc0a80165
test_frame1.payload = bytearray(range(payload_len))
test_frame1.build()
test_frame2 = ip_ep.IPFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x0800
test_frame2.ip_version = 4
test_frame2.ip_ihl = 5
test_frame2.ip_length = None
test_frame2.ip_identification = 0
test_frame2.ip_flags = 2
test_frame2.ip_fragment_offset = 0
test_frame2.ip_ttl = 64
test_frame2.ip_protocol = 0x11
test_frame2.ip_header_checksum = None
test_frame2.ip_source_ip = 0xc0a80164
test_frame2.ip_dest_ip = 0xc0a80166
test_frame2.payload = bytearray(range(payload_len))
test_frame2.build()
eth_frame1 = test_frame1.build_eth()
eth_frame2 = test_frame2.build_eth()
for wait in wait_normal, wait_pause_source, wait_pause_sink:
source.send(eth_frame1)
source.send(eth_frame2)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame1
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame2
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 3: tuser assert, length %d" % payload_len)
current_test.next = 3
test_frame1 = ip_ep.IPFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x0800
test_frame1.ip_version = 4
test_frame1.ip_ihl = 5
test_frame1.ip_length = None
test_frame1.ip_identification = 0
test_frame1.ip_flags = 2
test_frame1.ip_fragment_offset = 0
test_frame1.ip_ttl = 64
test_frame1.ip_protocol = 0x11
test_frame1.ip_header_checksum = None
test_frame1.ip_source_ip = 0xc0a80164
test_frame1.ip_dest_ip = 0xc0a80165
test_frame1.payload = bytearray(range(payload_len))
test_frame1.build()
test_frame2 = ip_ep.IPFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x0800
test_frame2.ip_version = 4
test_frame2.ip_ihl = 5
test_frame2.ip_length = None
test_frame2.ip_identification = 0
test_frame2.ip_flags = 2
test_frame2.ip_fragment_offset = 0
test_frame2.ip_ttl = 64
test_frame2.ip_protocol = 0x11
test_frame2.ip_header_checksum = None
test_frame2.ip_source_ip = 0xc0a80164
test_frame2.ip_dest_ip = 0xc0a80166
test_frame2.payload = bytearray(range(payload_len))
test_frame2.build()
eth_frame1 = test_frame1.build_eth()
eth_frame2 = test_frame2.build_eth()
eth_frame1.payload.user = 1
for wait in wait_normal, wait_pause_source, wait_pause_sink:
source.send(eth_frame1)
source.send(eth_frame2)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame1
assert rx_frame.payload.user[-1]
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame2
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 4: trailing bytes (1), length %d" % payload_len)
current_test.next = 4
test_frame1 = ip_ep.IPFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x0800
test_frame1.ip_version = 4
test_frame1.ip_ihl = 5
test_frame1.ip_length = None
test_frame1.ip_identification = 0
test_frame1.ip_flags = 2
test_frame1.ip_fragment_offset = 0
test_frame1.ip_ttl = 64
test_frame1.ip_protocol = 0x11
test_frame1.ip_header_checksum = None
test_frame1.ip_source_ip = 0xc0a80164
test_frame1.ip_dest_ip = 0xc0a80165
test_frame1.payload = bytearray(range(payload_len))
test_frame1.build()
test_frame2 = ip_ep.IPFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x0800
test_frame2.ip_version = 4
test_frame2.ip_ihl = 5
test_frame2.ip_length = None
test_frame2.ip_identification = 0
test_frame2.ip_flags = 2
test_frame2.ip_fragment_offset = 0
test_frame2.ip_ttl = 64
test_frame2.ip_protocol = 0x11
test_frame2.ip_header_checksum = None
test_frame2.ip_source_ip = 0xc0a80164
test_frame2.ip_dest_ip = 0xc0a80166
test_frame2.payload = bytearray(range(payload_len))
test_frame2.build()
eth_frame1 = test_frame1.build_eth()
eth_frame2 = test_frame2.build_eth()
eth_frame1.payload.data += bytearray(b'\x00')
for wait in wait_normal, wait_pause_source, wait_pause_sink:
source.send(eth_frame1)
source.send(eth_frame2)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame1
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame2
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 5: trailing bytes (10), length %d" % payload_len)
current_test.next = 5
test_frame1 = ip_ep.IPFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x0800
test_frame1.ip_version = 4
test_frame1.ip_ihl = 5
test_frame1.ip_length = None
test_frame1.ip_identification = 0
test_frame1.ip_flags = 2
test_frame1.ip_fragment_offset = 0
test_frame1.ip_ttl = 64
test_frame1.ip_protocol = 0x11
test_frame1.ip_header_checksum = None
test_frame1.ip_source_ip = 0xc0a80164
test_frame1.ip_dest_ip = 0xc0a80165
test_frame1.payload = bytearray(range(payload_len))
test_frame1.build()
test_frame2 = ip_ep.IPFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x0800
test_frame2.ip_version = 4
test_frame2.ip_ihl = 5
test_frame2.ip_length = None
test_frame2.ip_identification = 0
test_frame2.ip_flags = 2
test_frame2.ip_fragment_offset = 0
test_frame2.ip_ttl = 64
test_frame2.ip_protocol = 0x11
test_frame2.ip_header_checksum = None
test_frame2.ip_source_ip = 0xc0a80164
test_frame2.ip_dest_ip = 0xc0a80166
test_frame2.payload = bytearray(range(payload_len))
test_frame2.build()
eth_frame1 = test_frame1.build_eth()
eth_frame2 = test_frame2.build_eth()
eth_frame1.payload.data += bytearray(b'\x00'*10)
for wait in wait_normal, wait_pause_source, wait_pause_sink:
source.send(eth_frame1)
source.send(eth_frame2)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame1
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame2
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 6: trailing bytes with tuser assert (1), length %d" % payload_len)
current_test.next = 6
test_frame1 = ip_ep.IPFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x0800
test_frame1.ip_version = 4
test_frame1.ip_ihl = 5
test_frame1.ip_length = None
test_frame1.ip_identification = 0
test_frame1.ip_flags = 2
test_frame1.ip_fragment_offset = 0
test_frame1.ip_ttl = 64
test_frame1.ip_protocol = 0x11
test_frame1.ip_header_checksum = None
test_frame1.ip_source_ip = 0xc0a80164
test_frame1.ip_dest_ip = 0xc0a80165
test_frame1.payload = bytearray(range(payload_len))
test_frame1.build()
test_frame2 = ip_ep.IPFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x0800
test_frame2.ip_version = 4
test_frame2.ip_ihl = 5
test_frame2.ip_length = None
test_frame2.ip_identification = 0
test_frame2.ip_flags = 2
test_frame2.ip_fragment_offset = 0
test_frame2.ip_ttl = 64
test_frame2.ip_protocol = 0x11
test_frame2.ip_header_checksum = None
test_frame2.ip_source_ip = 0xc0a80164
test_frame2.ip_dest_ip = 0xc0a80166
test_frame2.payload = bytearray(range(payload_len))
test_frame2.build()
eth_frame1 = test_frame1.build_eth()
eth_frame2 = test_frame2.build_eth()
eth_frame1.payload.data += bytearray(b'\x00')
eth_frame1.payload.user = 1
for wait in wait_normal, wait_pause_source, wait_pause_sink:
source.send(eth_frame1)
source.send(eth_frame2)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame1
assert rx_frame.payload.user[-1]
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame2
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 7: trailing bytes with tuser assert (10), length %d" % payload_len)
current_test.next = 7
test_frame1 = ip_ep.IPFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x0800
test_frame1.ip_version = 4
test_frame1.ip_ihl = 5
test_frame1.ip_length = None
test_frame1.ip_identification = 0
test_frame1.ip_flags = 2
test_frame1.ip_fragment_offset = 0
test_frame1.ip_ttl = 64
test_frame1.ip_protocol = 0x11
test_frame1.ip_header_checksum = None
test_frame1.ip_source_ip = 0xc0a80164
test_frame1.ip_dest_ip = 0xc0a80165
test_frame1.payload = bytearray(range(payload_len))
test_frame1.build()
test_frame2 = ip_ep.IPFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x0800
test_frame2.ip_version = 4
test_frame2.ip_ihl = 5
test_frame2.ip_length = None
test_frame2.ip_identification = 0
test_frame2.ip_flags = 2
test_frame2.ip_fragment_offset = 0
test_frame2.ip_ttl = 64
test_frame2.ip_protocol = 0x11
test_frame2.ip_header_checksum = None
test_frame2.ip_source_ip = 0xc0a80164
test_frame2.ip_dest_ip = 0xc0a80166
test_frame2.payload = bytearray(range(payload_len))
test_frame2.build()
eth_frame1 = test_frame1.build_eth()
eth_frame2 = test_frame2.build_eth()
eth_frame1.payload.data += bytearray(b'\x00'*10)
eth_frame1.payload.user = 1
for wait in wait_normal, wait_pause_source, wait_pause_sink:
source.send(eth_frame1)
source.send(eth_frame2)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame1
assert rx_frame.payload.user[-1]
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame2
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 8: truncated payload (1), length %d" % payload_len)
current_test.next = 8
test_frame1 = ip_ep.IPFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x0800
test_frame1.ip_version = 4
test_frame1.ip_ihl = 5
test_frame1.ip_length = None
test_frame1.ip_identification = 0
test_frame1.ip_flags = 2
test_frame1.ip_fragment_offset = 0
test_frame1.ip_ttl = 64
test_frame1.ip_protocol = 0x11
test_frame1.ip_header_checksum = None
test_frame1.ip_source_ip = 0xc0a80164
test_frame1.ip_dest_ip = 0xc0a80165
test_frame1.payload = bytearray(range(payload_len+1))
test_frame1.build()
test_frame2 = ip_ep.IPFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x0800
test_frame2.ip_version = 4
test_frame2.ip_ihl = 5
test_frame2.ip_length = None
test_frame2.ip_identification = 0
test_frame2.ip_flags = 2
test_frame2.ip_fragment_offset = 0
test_frame2.ip_ttl = 64
test_frame2.ip_protocol = 0x11
test_frame2.ip_header_checksum = None
test_frame2.ip_source_ip = 0xc0a80164
test_frame2.ip_dest_ip = 0xc0a80166
test_frame2.payload = bytearray(range(payload_len))
test_frame2.build()
eth_frame1 = test_frame1.build_eth()
eth_frame2 = test_frame2.build_eth()
eth_frame1.payload.data = eth_frame1.payload.data[:-1]
for wait in wait_normal, wait_pause_source, wait_pause_sink:
error_payload_early_termination_asserted.next = 0
source.send(eth_frame1)
source.send(eth_frame2)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame.payload.user[-1]
assert error_payload_early_termination_asserted
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame2
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 9: truncated payload (10), length %d" % payload_len)
current_test.next = 9
test_frame1 = ip_ep.IPFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x0800
test_frame1.ip_version = 4
test_frame1.ip_ihl = 5
test_frame1.ip_length = None
test_frame1.ip_identification = 0
test_frame1.ip_flags = 2
test_frame1.ip_fragment_offset = 0
test_frame1.ip_ttl = 64
test_frame1.ip_protocol = 0x11
test_frame1.ip_header_checksum = None
test_frame1.ip_source_ip = 0xc0a80164
test_frame1.ip_dest_ip = 0xc0a80165
test_frame1.payload = bytearray(range(payload_len+10))
test_frame1.build()
test_frame2 = ip_ep.IPFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x0800
test_frame2.ip_version = 4
test_frame2.ip_ihl = 5
test_frame2.ip_length = None
test_frame2.ip_identification = 0
test_frame2.ip_flags = 2
test_frame2.ip_fragment_offset = 0
test_frame2.ip_ttl = 64
test_frame2.ip_protocol = 0x11
test_frame2.ip_header_checksum = None
test_frame2.ip_source_ip = 0xc0a80164
test_frame2.ip_dest_ip = 0xc0a80166
test_frame2.payload = bytearray(range(payload_len))
test_frame2.build()
eth_frame1 = test_frame1.build_eth()
eth_frame2 = test_frame2.build_eth()
eth_frame1.payload.data = eth_frame1.payload.data[:-10]
for wait in wait_normal, wait_pause_source, wait_pause_sink:
error_payload_early_termination_asserted.next = 0
source.send(eth_frame1)
source.send(eth_frame2)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame.payload.user[-1]
assert error_payload_early_termination_asserted
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert rx_frame == test_frame2
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 10: bad IHL, length %d" % payload_len)
current_test.next = 10
test_frame1 = ip_ep.IPFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x0800
test_frame1.ip_version = 4
test_frame1.ip_ihl = 6
test_frame1.ip_length = None
test_frame1.ip_identification = 0
test_frame1.ip_flags = 2
test_frame1.ip_fragment_offset = 0
test_frame1.ip_ttl = 64
test_frame1.ip_protocol = 0x11
test_frame1.ip_header_checksum = None
test_frame1.ip_source_ip = 0xc0a80164
test_frame1.ip_dest_ip = 0xc0a80165
test_frame1.payload = bytearray(range(payload_len))
test_frame1.build()
test_frame2 = ip_ep.IPFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x0800
test_frame2.ip_version = 4
test_frame2.ip_ihl = 5
test_frame2.ip_length = None
test_frame2.ip_identification = 0
test_frame2.ip_flags = 2
test_frame2.ip_fragment_offset = 0
test_frame2.ip_ttl = 64
test_frame2.ip_protocol = 0x11
test_frame2.ip_header_checksum = None
test_frame2.ip_source_ip = 0xc0a80164
test_frame2.ip_dest_ip = 0xc0a80166
test_frame2.payload = bytearray(range(payload_len))
test_frame2.build()
eth_frame1 = test_frame1.build_eth()
eth_frame2 = test_frame2.build_eth()
for wait in wait_normal, wait_pause_source, wait_pause_sink:
error_invalid_header_asserted.next = 0
source.send(eth_frame1)
source.send(eth_frame2)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert error_invalid_header_asserted
assert rx_frame == test_frame2
assert sink.empty()
yield delay(100)
yield clk.posedge
print("test 11: bad checksum, length %d" % payload_len)
current_test.next = 11
test_frame1 = ip_ep.IPFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x0800
test_frame1.ip_version = 4
test_frame1.ip_ihl = 5
test_frame1.ip_length = None
test_frame1.ip_identification = 0
test_frame1.ip_flags = 2
test_frame1.ip_fragment_offset = 0
test_frame1.ip_ttl = 64
test_frame1.ip_protocol = 0x11
test_frame1.ip_header_checksum = 0x1234
test_frame1.ip_source_ip = 0xc0a80164
test_frame1.ip_dest_ip = 0xc0a80165
test_frame1.payload = bytearray(range(payload_len))
test_frame1.build()
test_frame2 = ip_ep.IPFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x0800
test_frame2.ip_version = 4
test_frame2.ip_ihl = 5
test_frame2.ip_length = None
test_frame2.ip_identification = 0
test_frame2.ip_flags = 2
test_frame2.ip_fragment_offset = 0
test_frame2.ip_ttl = 64
test_frame2.ip_protocol = 0x11
test_frame2.ip_header_checksum = None
test_frame2.ip_source_ip = 0xc0a80164
test_frame2.ip_dest_ip = 0xc0a80166
test_frame2.payload = bytearray(range(payload_len))
test_frame2.build()
eth_frame1 = test_frame1.build_eth()
eth_frame2 = test_frame2.build_eth()
for wait in wait_normal, wait_pause_source, wait_pause_sink:
error_invalid_checksum_asserted.next = 0
source.send(eth_frame1)
source.send(eth_frame2)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert error_invalid_checksum_asserted
assert rx_frame == test_frame2
assert sink.empty()
yield delay(100)
for length in range(1,21):
yield clk.posedge
print("test 12: truncated header, length %d" % length)
current_test.next = 12
test_frame1 = ip_ep.IPFrame()
test_frame1.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame1.eth_src_mac = 0x5A5152535455
test_frame1.eth_type = 0x0800
test_frame1.ip_version = 4
test_frame1.ip_ihl = 5
test_frame1.ip_length = None
test_frame1.ip_identification = 0
test_frame1.ip_flags = 2
test_frame1.ip_fragment_offset = 0
test_frame1.ip_ttl = 64
test_frame1.ip_protocol = 0x11
test_frame1.ip_header_checksum = None
test_frame1.ip_source_ip = 0xc0a80164
test_frame1.ip_dest_ip = 0xc0a80165
test_frame1.payload = bytearray(range(16))
test_frame1.build()
test_frame2 = ip_ep.IPFrame()
test_frame2.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame2.eth_src_mac = 0x5A5152535455
test_frame2.eth_type = 0x0800
test_frame2.ip_version = 4
test_frame2.ip_ihl = 5
test_frame2.ip_length = None
test_frame2.ip_identification = 0
test_frame2.ip_flags = 2
test_frame2.ip_fragment_offset = 0
test_frame2.ip_ttl = 64
test_frame2.ip_protocol = 0x11
test_frame2.ip_header_checksum = None
test_frame2.ip_source_ip = 0xc0a80164
test_frame2.ip_dest_ip = 0xc0a80166
test_frame2.payload = bytearray(range(16))
test_frame2.build()
eth_frame1 = test_frame1.build_eth()
eth_frame2 = test_frame2.build_eth()
eth_frame1.payload.data = eth_frame1.payload.data[:length]
for wait in wait_normal, wait_pause_source, wait_pause_sink:
error_header_early_termination_asserted.next = 0
source.send(eth_frame1)
source.send(eth_frame2)
yield clk.posedge
yield clk.posedge
yield wait()
yield clk.posedge
yield sink.wait()
rx_frame = sink.recv()
assert error_header_early_termination_asserted
assert rx_frame == test_frame2
assert sink.empty()
yield delay(100)
raise StopSimulation
return instances()
def test_bench():
sim = Simulation(bench())
sim.run()
if __name__ == '__main__':
print("Running test...")
test_bench()
| mit | -8,964,308,664,283,597,000 | 34.560377 | 91 | 0.5555 | false |
zleap/nestedloops | turtle-loop4.3.py | 1 | 1437 | #!/usr/bin/env python
#more advanced nested loops program.
#this draws a shape that starts small and repeats, getting bigger each time
#change the range x,72 to a bigger number
#note turtle.forward(x) is linked to the range, i.e. how many shapes are drawn and how big they get
import time
import turtle
import math
#set file name
#fname= raw_input("Filename (must include .eps ")
fname="dial.eps"
sides = raw_input("number of sides to shape ")
i = sides.isdigit()
while i != True:
print("Input MUST be a number")
sides = raw_input("number of sides to shape ")
i = sides.isdigit()
sides = int(sides)
shapeangle = 360 / sides
print shapeangle
print turtle.pencolor()
print turtle.fillcolor()
print turtle.pensize()
turtle.pen(fillcolor="Red", pencolor="blue", pensize=1)
for x in range(25,50):
turtle.fill(True)
turtle.left(5)
for n in range(0,sides):
turtle.forward(x)
turtle.left(shapeangle)
turtle.fill(1)
turtle.forward(50)
for x in range(25,50):
turtle.fill(True)
turtle.left(5)
for n in range(0,sides):
turtle.backward(x)
turtle.left(shapeangle)
turtle.fill(1)
#hide turtle once drawing complete
turtle.hideturtle()
#pause
time.sleep(1)
#grab screen
ts = turtle.getscreen()
#save canvas to filename specified earlier
ts.getcanvas().postscript(file=fname)
#tell user that file is saved to the specified filename
print "Saved image to: ", fname
#prompt user to click image to exit
print "All done. Click image to exit."
turtle.exitonclick()
| gpl-2.0 | 6,102,157,318,396,206,000 | 19.239437 | 67 | 0.729297 | false |
inteligencia-coletiva-lsd/pybossa | test/test_jobs/test_send_mail.py | 6 | 1426 | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.jobs import send_mail
from mock import patch
@patch('pybossa.jobs.mail')
@patch('pybossa.jobs.Message')
class TestSendMailJob(object):
def test_send_mail_creates_message(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
Message.assert_called_once_with(**mail_dict)
def test_send_mail_sends_mail(self, Message, mail):
mail_dict = dict(subject='Hello', recipients=['pepito@hotmail.con'],
body='Hello Pepito!')
send_mail(mail_dict)
mail.send.assert_called_once_with(Message())
| agpl-3.0 | 5,602,253,873,254,815,000 | 36.526316 | 77 | 0.693548 | false |
fishilico/selinux-refpolicy-patched | policy/flask/flask.py | 16 | 14520 | #!/usr/bin/python -E
#
# Author(s): Caleb Case <ccase@tresys.com>
#
# Adapted from the bash/awk scripts mkflask.sh and mkaccess_vector.sh
#
import getopt
import os
import sys
import re
class ParseError(Exception):
def __init__(self, type, file, line):
self.type = type
self.file = file
self.line = line
def __str__(self):
typeS = self.type
if type(self.type) is not str: typeS = Flask.CONSTANT_S[self.type]
return "Parse Error: Unexpected %s on line %d of %s." % (typeS, self.line, self.file)
class DuplicateError(Exception):
def __init__(self, type, file, line, symbol):
self.type = type
self.file = file
self.line = line
self.symbol = symbol
def __str__(self):
typeS = self.type
if type(self.type) is not str: typeS = Flask.CONSTANT_S[self.type]
return "Duplicate Error: Duplicate %s '%s' on line %d of %s." % (typeS, self.symbol, self.line, self.file)
class UndefinedError(Exception):
def __init__(self, type, file, line, symbol):
self.type = type
self.file = file
self.line = line
self.symbol = symbol
def __str__(self):
typeS = self.type
if type(self.type) is not str: typeS = Flask.CONSTANT_S[self.type]
return "Undefined Error: %s '%s' is not defined but used on line %d of %s." % (typeS, self.symbol, self.line, self.file)
class UnusedError(Exception):
def __init__(self, info):
self.info = info
def __str__(self):
return "Unused Error: %s" % self.info
class Flask:
'''
FLASK container class with utilities for parsing definition
files and creating c header files.
'''
#Constants used in definitions parsing.
WHITE = re.compile(r'^\s*$')
COMMENT = re.compile(r'^\s*#')
USERFLAG = re.compile(r'# userspace')
CLASS = re.compile(r'^class (?P<name>\w+)')
COMMON = re.compile(r'^common (?P<name>\w+)')
INHERITS = re.compile(r'^inherits (?P<name>\w+)')
OPENB = re.compile(r'^{')
VECTOR = re.compile(r'^\s*(?P<name>\w+)')
CLOSEB = re.compile(r'^}')
SID = re.compile(r'^sid (?P<name>\w+)')
EOF = "end of file"
#Constants used in header generation.
USERSPACE = 0
KERNEL = 1
CONSTANT_S = { \
#parsing constants
WHITE : "whitespace", \
COMMENT : "comment", \
USERFLAG : "userspace flag", \
CLASS : "class definition", \
COMMON : "common definition", \
INHERITS : "inherits definition", \
OPENB : "'{'", \
VECTOR : "access vector definition", \
CLOSEB : "'}'", \
SID : "security identifier", \
EOF : "end of file", \
#generation constants
USERSPACE : "userspace mode", \
KERNEL : "kernel mode", \
}
def __init__(self, warn = True):
self.WARN = warn
self.autogen = "/* This file is automatically generated. Do not edit. */\n"
self.commons = []
self.user_commons = []
self.common = {}
self.classes = []
self.vectors = []
self.vector = {}
self.userspace = {}
self.sids = []
self.inherits = {}
def warning(self, msg):
'''
Prints a warning message out to stderr if warnings are enabled.
'''
if self.WARN: sys.stderr.write("Warning: %s\n" % msg)
def parseClasses(self, path):
'''
Parses security class definitions from the given path.
'''
classes = []
input = open(path, 'r')
number = 0
for line in input:
number += 1
m = self.COMMENT.search(line)
if m: continue
m = self.WHITE.search(line)
if m: continue
m = self.CLASS.search(line)
if m:
g = m.groupdict()
c = g['name']
if c in classes: raise DuplicateError, (self.CLASS, path, number, c)
classes.append(c)
if self.USERFLAG.search(line):
self.userspace[c] = True
else:
self.userspace[c] = False
continue
raise ParseError, ("data. Was expecting either a comment, whitespace, or class definition. ", path, number)
self.classes = classes
return classes
def parseSids(self, path):
'''
Parses initial SID definitions from the given path.
'''
sids = []
input = open(path, 'r')
number = 0
for line in input:
number += 1
m = self.COMMENT.search(line)
if m: continue
m = self.WHITE.search(line)
if m: continue
m = self.SID.search(line)
if m:
g = m.groupdict()
s = g['name']
if s in sids: raise DuplicateError, (self.SID, path, number, s)
sids.append(s)
continue
raise ParseError, ("data. Was expecting either a comment, whitespace, or security identifier. ", path, number)
self.sids = sids
return sids
def parseVectors(self, path):
'''
Parses access vector definitions from the given path.
'''
vectors = []
vector = {}
commons = []
common = {}
inherits = {}
user_commons = {}
input = open(path, 'r')
# states
NONE = 0
COMMON = 1
CLASS = 2
INHERIT = 3
OPEN = 4
state = NONE
state2 = NONE
number = 0
for line in input:
number += 1
m = self.COMMENT.search(line)
if m: continue
m = self.WHITE.search(line)
if m:
if state == INHERIT:
state = NONE
continue
m = self.COMMON.search(line)
if m:
if state != NONE: raise ParseError, (self.COMMON, path, number)
g = m.groupdict()
c = g['name']
if c in commons: raise DuplicateError, (self.COMMON, path, number, c)
commons.append(c)
common[c] = []
user_commons[c] = True
state = COMMON
continue
m = self.CLASS.search(line)
if m:
if state != NONE: raise ParseError, (self.CLASS, path, number)
g = m.groupdict()
c = g['name']
if c in vectors: raise DuplicateError, (self.CLASS, path, number, c)
if c not in self.classes: raise UndefinedError, (self.CLASS, path, number, c)
vectors.append(c)
vector[c] = []
state = CLASS
continue
m = self.INHERITS.search(line)
if m:
if state != CLASS: raise ParseError, (self.INHERITS, path, number)
g = m.groupdict()
i = g['name']
if c in inherits: raise DuplicateError, (self.INHERITS, path, number, c)
if i not in common: raise UndefinedError, (self.COMMON, path, number, i)
inherits[c] = i
state = INHERIT
if not self.userspace[c]: user_commons[i] = False
continue
m = self.OPENB.search(line)
if m:
if (state != CLASS \
and state != INHERIT \
and state != COMMON) \
or state2 != NONE:
raise ParseError, (self.OPENB, path, number)
state2 = OPEN
continue
m = self.VECTOR.search(line)
if m:
if state2 != OPEN: raise ParseError, (self.VECTOR, path, number)
g = m.groupdict()
v = g['name']
if state == CLASS or state == INHERIT:
if v in vector[c]: raise DuplicateError, (self.VECTOR, path, number, v)
vector[c].append(v)
elif state == COMMON:
if v in common[c]: raise DuplicateError, (self.VECTOR, path, number, v)
common[c].append(v)
continue
m = self.CLOSEB.search(line)
if m:
if state2 != OPEN: raise ParseError, (self.CLOSEB, path, number)
state = NONE
state2 = NONE
c = None
continue
raise ParseError, ("data", path, number)
if state != NONE and state2 != NONE: raise ParseError, (self.EOF, path, number)
cvdiff = set(self.classes) - set(vectors)
if cvdiff: raise UnusedError, "Not all security classes were used in access vectors: %s" % cvdiff # the inverse of this will be caught as an undefined class error
self.commons = commons
self.user_commons = user_commons
self.common = common
self.vectors = vectors
self.vector = vector
self.inherits = inherits
return vector
def createHeaders(self, path, mode = USERSPACE):
'''
Creates the C header files in the specified MODE and outputs
them to give PATH.
'''
headers = { \
'av_inherit.h' : self.createAvInheritH(mode), \
'av_perm_to_string.h' : self.createAvPermToStringH(mode), \
'av_permissions.h' : self.createAvPermissionsH(mode), \
'class_to_string.h' : self.createClassToStringH(mode), \
'common_perm_to_string.h' : self.createCommonPermToStringH(mode), \
'flask.h' : self.createFlaskH(mode), \
'initial_sid_to_string.h' : self.createInitialSidToStringH(mode) \
}
for key, value in headers.items():
of = open(os.path.join(path, key), 'w')
of.writelines(value)
of.close()
def createUL(self, count):
fields = [1, 2, 4, 8]
return "0x%08xUL" % (fields[count % 4] << 4 * (count / 4))
def createAvInheritH(self, mode = USERSPACE):
'''
'''
results = []
results.append(self.autogen)
for c in self.vectors:
if c in self.inherits:
i = self.inherits[c]
count = len(self.common[i])
if not (mode == self.KERNEL and self.userspace[c]):
results.append(" S_(SECCLASS_%s, %s, %s)\n" % (c.upper(), i, self.createUL(count)))
return results
def createAvPermToStringH(self, mode = USERSPACE):
'''
'''
results = []
results.append(self.autogen)
for c in self.vectors:
for p in self.vector[c]:
if not (mode == self.KERNEL and self.userspace[c]):
results.append(" S_(SECCLASS_%s, %s__%s, \"%s\")\n" % (c.upper(), c.upper(), p.upper(), p))
return results
def createAvPermissionsH(self, mode = USERSPACE):
'''
'''
results = []
results.append(self.autogen)
width = 57
count = 0
for common in self.commons:
count = 0
shift = 0
for p in self.common[common]:
if not (mode == self.KERNEL and self.user_commons[common]):
columnA = "#define COMMON_%s__%s " % (common.upper(), p.upper())
columnA += "".join([" " for i in range(width - len(columnA))])
results.append("%s%s\n" % (columnA, self.createUL(count)))
count += 1
width = 50 # broken for old tools whitespace
for c in self.vectors:
count = 0
ps = []
if c in self.inherits:
ps += self.common[self.inherits[c]]
ps += self.vector[c]
for p in ps:
columnA = "#define %s__%s " % (c.upper(), p.upper())
columnA += "".join([" " for i in range(width - len(columnA))])
if not (mode == self.KERNEL and self.userspace[c]):
results.append("%s%s\n" % (columnA, self.createUL(count)))
count += 1
return results
def createClassToStringH(self, mode = USERSPACE):
'''
'''
results = []
results.append(self.autogen)
results.append("/*\n * Security object class definitions\n */\n")
if mode == self.KERNEL:
results.append(" S_(NULL)\n")
else:
results.append(" S_(\"null\")\n")
for c in self.classes:
if mode == self.KERNEL and self.userspace[c]:
results.append(" S_(NULL)\n")
else:
results.append(" S_(\"%s\")\n" % c)
return results
def createCommonPermToStringH(self, mode = USERSPACE):
'''
'''
results = []
results.append(self.autogen)
for common in self.commons:
if not (mode == self.KERNEL and self.user_commons[common]):
results.append("TB_(common_%s_perm_to_string)\n" % common)
for p in self.common[common]:
results.append(" S_(\"%s\")\n" % p)
results.append("TE_(common_%s_perm_to_string)\n\n" % common)
return results
def createFlaskH(self, mode = USERSPACE):
'''
'''
results = []
results.append(self.autogen)
results.append("#ifndef _SELINUX_FLASK_H_\n")
results.append("#define _SELINUX_FLASK_H_\n")
results.append("\n")
results.append("/*\n")
results.append(" * Security object class definitions\n")
results.append(" */\n")
count = 0
width = 57
for c in self.classes:
count += 1
columnA = "#define SECCLASS_%s " % c.upper()
columnA += "".join([" " for i in range(width - len(columnA))])
if not (mode == self.KERNEL and self.userspace[c]):
results.append("%s%d\n" % (columnA, count))
results.append("\n")
results.append("/*\n")
results.append(" * Security identifier indices for initial entities\n")
results.append(" */\n")
count = 0
width = 56 # broken for old tools whitespace
for s in self.sids:
count += 1
columnA = "#define SECINITSID_%s " % s.upper()
columnA += "".join([" " for i in range(width - len(columnA))])
results.append("%s%d\n" % (columnA, count))
results.append("\n")
columnA = "#define SECINITSID_NUM "
columnA += "".join([" " for i in range(width - len(columnA))])
results.append("%s%d\n" % (columnA, count))
results.append("\n")
results.append("#endif\n")
return results
def createInitialSidToStringH(self, mode = USERSPACE):
'''
'''
results = []
results.append(self.autogen)
results.append("static char *initial_sid_to_string[] =\n")
results.append("{\n")
results.append(" \"null\",\n")
for s in self.sids:
results.append(" \"%s\",\n" % s)
results.append("};\n")
results.append("\n")
return results
def usage():
'''
Returns the usage string.
'''
usage = 'Usage: %s -a ACCESS_VECTORS -i INITIAL_SIDS -s SECURITY_CLASSES -o OUTPUT_DIRECTORY -k|-u [-w]\n' % os.path.basename(sys.argv[0])
usage += '\n'
usage += ' -a --access_vectors\taccess vector definitions\n'
usage += ' -i --initial_sids\tinitial sid definitions\n'
usage += ' -s --security_classes\tsecurity class definitions\n'
usage += ' -o --output\toutput directory for generated files\n'
usage += ' -k --kernel\toutput mode set to kernel (kernel headers contain empty blocks for all classes specified with # userspace in the security_classes file)\n'
usage += ' -u --user\toutput mode set to userspace\n'
usage += ' -w --nowarnings\tsuppresses output of warning messages\n'
return usage
########## MAIN ##########
if __name__ == '__main__':
# Parse command line args
try:
opts, args = getopt.getopt(sys.argv[1:], 'a:i:s:o:kuwh', ['access_vectors=', 'initial_sids=', 'security_classes=', 'output=', 'kernel', 'user', 'nowarnings', 'help'])
except getopt.GetoptError:
print(usage())
sys.exit(2)
avec = None
isid = None
secc = None
outd = None
mode = None
warn = True
for o, a in opts:
if o in ('-h', '--help'):
print(usage())
sys.exit(0)
elif o in ('-a', '--access_vectors'):
avec = a
elif o in ('-i', '--initial_sids'):
isid = a
elif o in ('-s', '--security_classes'):
secc = a
elif o in ('-o', '--output'):
outd = a
elif o in ('-k', '--kernel'):
if mode != None:
print(usage())
sys.exit(2)
mode = Flask.KERNEL
elif o in ('-u', '--user'):
if mode != None:
print(usage())
sys.exit(2)
mode = Flask.USERSPACE
elif o in ('-w', '--nowarnings'):
warn = False
else:
print(usage())
sys.exit(2)
if avec == None or \
isid == None or \
secc == None or \
outd == None:
print(usage())
sys.exit(2)
try:
f = Flask(warn)
f.parseSids(isid)
f.parseClasses(secc)
f.parseVectors(avec)
f.createHeaders(outd, mode)
except Exception, e:
print(e)
sys.exit(2)
| gpl-2.0 | 2,275,156,658,115,813,000 | 26.089552 | 168 | 0.61708 | false |
mbohlool/client-python | kubernetes/client/models/v1_tcp_socket_action.py | 1 | 4029 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1TCPSocketAction(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'host': 'str',
'port': 'object'
}
attribute_map = {
'host': 'host',
'port': 'port'
}
def __init__(self, host=None, port=None):
"""
V1TCPSocketAction - a model defined in Swagger
"""
self._host = None
self._port = None
self.discriminator = None
if host is not None:
self.host = host
self.port = port
@property
def host(self):
"""
Gets the host of this V1TCPSocketAction.
Optional: Host name to connect to, defaults to the pod IP.
:return: The host of this V1TCPSocketAction.
:rtype: str
"""
return self._host
@host.setter
def host(self, host):
"""
Sets the host of this V1TCPSocketAction.
Optional: Host name to connect to, defaults to the pod IP.
:param host: The host of this V1TCPSocketAction.
:type: str
"""
self._host = host
@property
def port(self):
"""
Gets the port of this V1TCPSocketAction.
Number or name of the port to access on the container. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME.
:return: The port of this V1TCPSocketAction.
:rtype: object
"""
return self._port
@port.setter
def port(self, port):
"""
Sets the port of this V1TCPSocketAction.
Number or name of the port to access on the container. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME.
:param port: The port of this V1TCPSocketAction.
:type: object
"""
if port is None:
raise ValueError("Invalid value for `port`, must not be `None`")
self._port = port
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1TCPSocketAction):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
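# Illustrative usage (assumed, not part of the generated file): instances are
# normally built by the client deserializer, but can be constructed directly:
#   action = V1TCPSocketAction(port=8080, host="10.0.0.1")
#   action.to_dict()  # -> {'host': '10.0.0.1', 'port': 8080}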
| apache-2.0 | -7,446,419,965,808,691,000 | 24.993548 | 133 | 0.535865 | false |
tabo/feedjack | feedjack/urls.py | 4 | 1348 | # -*- coding: utf-8 -*-
"""
feedjack
Gustavo Picón
urls.py
"""
from django.conf.urls.defaults import patterns
from django.views.generic.simple import redirect_to
from feedjack import views
urlpatterns = patterns('',
(r'^rss20.xml$', redirect_to,
{'url':'/feed/rss/'}),
(r'^feed/$', redirect_to,
{'url':'/feed/atom/'}),
(r'^feed/rss/$', views.rssfeed),
(r'^feed/atom/$', views.atomfeed),
(r'^feed/user/(?P<user>\d+)/tag/(?P<tag>.*)/$', redirect_to,
{'url':'/feed/atom/user/%(user)s/tag/%(tag)s/'}),
(r'^feed/user/(?P<user>\d+)/$', redirect_to,
{'url':'/feed/atom/user/%(user)s/'}),
(r'^feed/tag/(?P<tag>.*)/$', redirect_to,
{'url':'/feed/atom/tag/%(tag)s/'}),
(r'^feed/atom/user/(?P<user>\d+)/tag/(?P<tag>.*)/$', views.atomfeed),
(r'^feed/atom/user/(?P<user>\d+)/$', views.atomfeed),
(r'^feed/atom/tag/(?P<tag>.*)/$', views.atomfeed),
(r'^feed/rss/user/(?P<user>\d+)/tag/(?P<tag>.*)/$', views.rssfeed),
(r'^feed/rss/user/(?P<user>\d+)/$', views.rssfeed),
(r'^feed/rss/tag/(?P<tag>.*)/$', views.rssfeed),
(r'^user/(?P<user>\d+)/tag/(?P<tag>.*)/$', views.mainview),
(r'^user/(?P<user>\d+)/$', views.mainview),
(r'^tag/(?P<tag>.*)/$', views.mainview),
(r'^opml/$', views.opml),
(r'^foaf/$', views.foaf),
(r'^$', views.mainview),
)
#~
| bsd-3-clause | 7,027,726,029,476,670,000 | 27.659574 | 73 | 0.530809 | false |
Ragowit/fireplace | fireplace/cards/classic/mage.py | 2 | 3020 | from ..utils import *
##
# Hero Powers
# Fireblast (Jaina Proudmoore)
class CS2_034:
activate = Hit(TARGET, 1)
# Fireblast (Medivh)
class CS2_034_H1:
activate = CS2_034.activate
# Fireblast (Khadgar)
class CS2_034_H2:
activate = CS2_034.activate
##
# Minions
# Water Elemental
class CS2_033:
events = Damage(CHARACTER, None, SELF).on(Freeze(Damage.TARGET))
# Ethereal Arcanist
class EX1_274:
events = OWN_TURN_END.on(Find(FRIENDLY_SECRETS) & Buff(SELF, "EX1_274e"))
EX1_274e = buff(+2, +2)
# Archmage Antonidas
class EX1_559:
events = OWN_SPELL_PLAY.on(Give(CONTROLLER, "CS2_029"))
# Sorcerer's Apprentice
class EX1_608:
update = Refresh(FRIENDLY_HAND + SPELL, {GameTag.COST: -1})
# Kirin Tor Mage
class EX1_612:
play = Buff(FRIENDLY_HERO, "EX1_612o")
class EX1_612o:
update = Refresh(FRIENDLY_HAND + SECRET, {GameTag.COST: SET(0)})
events = Play(CONTROLLER, SECRET).on(Destroy(SELF))
# Mana Wyrm
class NEW1_012:
events = OWN_SPELL_PLAY.on(Buff(SELF, "NEW1_012o"))
NEW1_012o = buff(atk=1)
##
# Spells
# Polymorph
class CS2_022:
play = Morph(TARGET, "CS2_tk1")
# Arcane Intellect
class CS2_023:
play = Draw(CONTROLLER) * 2
# Frostbolt
class CS2_024:
play = Hit(TARGET, 3), Freeze(TARGET)
# Arcane Explosion
class CS2_025:
play = Hit(ENEMY_MINIONS, 1)
# Frost Nova
class CS2_026:
play = Freeze(ENEMY_MINIONS)
# Mirror Image
class CS2_027:
play = Summon(CONTROLLER, "CS2_mirror") * 2
# Blizzard
class CS2_028:
play = Hit(ENEMY_MINIONS, 2), Freeze(ENEMY_MINIONS)
# Fireball
class CS2_029:
play = Hit(TARGET, 6)
# Ice Lance
class CS2_031:
play = Find(TARGET + FROZEN) & Hit(TARGET, 4) | Freeze(TARGET)
# Flamestrike
class CS2_032:
play = Hit(ENEMY_MINIONS, 4)
# Cone of Cold
class EX1_275:
play = Hit(TARGET | TARGET_ADJACENT, 1), Freeze(TARGET | TARGET_ADJACENT)
# Arcane Missiles
class EX1_277:
def play(self):
count = self.controller.get_spell_damage(3)
yield Hit(RANDOM_ENEMY_CHARACTER, 1) * count
# Pyroblast
class EX1_279:
play = Hit(TARGET, 10)
##
# Secrets
# Spellbender
class tt_010:
secret = Play(OPPONENT, SPELL, MINION).on(FULL_BOARD | (
Reveal(SELF), Retarget(Play.CARD, Summon(CONTROLLER, "tt_010a"))
))
# Counterspell
class EX1_287:
secret = Play(OPPONENT, SPELL).on(
Reveal(SELF), Counter(Play.CARD)
)
# Ice Barrier
class EX1_289:
secret = Attack(CHARACTER, FRIENDLY_HERO).on(
Reveal(SELF), GainArmor(FRIENDLY_HERO, 8)
)
# Mirror Entity
class EX1_294:
secret = [
Play(OPPONENT, MINION).after(
Reveal(SELF), Summon(CONTROLLER, ExactCopy(Play.CARD))
),
Play(OPPONENT, ID("EX1_323h")).after(
Reveal(SELF), Summon(CONTROLLER, "EX1_323")
) # :-)
]
# Ice Block
class EX1_295:
secret = Predamage(FRIENDLY_HERO).on(
Lethal(FRIENDLY_HERO, Predamage.AMOUNT) & (
Reveal(SELF),
Buff(FRIENDLY_HERO, "EX1_295o"),
Predamage(FRIENDLY_HERO, 0)
)
)
EX1_295o = buff(immune=True)
# Vaporize
class EX1_594:
secret = Attack(MINION, FRIENDLY_HERO).on(
Reveal(SELF), Destroy(Attack.ATTACKER)
)
| agpl-3.0 | 2,376,840,009,213,906,000 | 15.324324 | 74 | 0.683775 | false |
efectivo/network_sim | diamond1_basic.py | 1 | 1640 | from units import runner, results_to_file
def job_gen():
run_num = 1
for dashed in [0, 1]:
for N in range(10, 101, 10):
k = 3
for run_id in range(run_num):
test = {
'test': {},
'net': {'topology': 'diamond', 'dashed': dashed, 'N': N, 'k': k},
'pattern': {'type': 'uniform_src_poisson_rate'},
'run_id': run_id,
'cycles': 100000,
'protocols': [
{'type': 'greedy', 'scheduler': 'LIS'},
{'type': 'goed', 'scheduler': 'LIS'}
]
}
yield test
for k in range(1, 11):
N = 50
for run_id in range(run_num):
test = {
'test': {},
'net': {'topology': 'diamond', 'dashed': dashed, 'N': N, 'k': k},
'pattern': {'type': 'uniform_src_poisson_rate'},
'run_id': run_id,
'cycles': 100000,
'protocols': [
{'type': 'greedy', 'scheduler': 'LIS'},
{'type': 'goed', 'dh_type': 'odd_even_downhill', 'scheduler': 'LIS'}
]
}
yield test
def job_run(test):
out = runner.run_single_sim(test)
return out
import multiprocessing
p = multiprocessing.Pool(15)
writer = results_to_file.ResultHandler('diamond1')
all_results = p.map(job_run, job_gen())
for result in all_results:
writer.write(result)
writer.close()
| mit | 7,274,586,252,964,647,000 | 30.538462 | 92 | 0.414634 | false |
tristan0x/hpcbench | tests/test_driver.py | 1 | 10271 | from collections import namedtuple
from functools import reduce
import json
import logging
import os
import os.path as osp
import shutil
import tempfile
import unittest
from cached_property import cached_property
import six
import yaml
from hpcbench.api import Benchmark
from hpcbench.campaign import ReportNode
from hpcbench.cli import bendoc, benelastic, benumb
from hpcbench.driver import CampaignDriver
from hpcbench.driver.executor import SrunExecutionDriver, Command
from hpcbench.driver.campaign import HostDriver, BenchmarkTagDriver
from hpcbench.driver.benchmark import (
BenchmarkDriver,
BenchmarkCategoryDriver,
FixedAttempts,
)
from hpcbench.toolbox.contextlib_ext import capture_stdout, mkdtemp, pushd
from . import BuildInfoBench, DriverTestCase, FakeBenchmark
from .benchmark.benchmark import AbstractBenchmarkTest
LOGGER = logging.getLogger('hpcbench')
class TestDriver(DriverTestCase, unittest.TestCase):
def test_get_unknown_benchmark_class(self):
with self.assertRaises(NameError) as exc:
Benchmark.get_subclass('unkn0wnb3nchm4rk')
self.assertEqual(
str(exc.exception), "Not a valid Benchmark class: unkn0wnb3nchm4rk"
)
def test_run_01(self):
self.assertTrue(osp.isdir(self.CAMPAIGN_PATH))
# ensure metrics have been generated
aggregated_metrics_f = osp.join(
TestDriver.CAMPAIGN_PATH,
TestDriver.driver.node,
'*',
'test_fake',
'main',
'metrics.json',
)
# use report API to ensure all commands succeeded
report = ReportNode(TestDriver.CAMPAIGN_PATH)
self.assertEqual(list(report.collect('command_succeeded')), [True] * 3)
self.assertTrue(
osp.isfile(aggregated_metrics_f), "Not file: " + aggregated_metrics_f
)
with open(aggregated_metrics_f) as istr:
aggregated_metrics = json.load(istr)
self.assertTrue(len(aggregated_metrics), 3)
def test_02_number(self):
self.assertIsNotNone(TestDriver.CAMPAIGN_PATH)
benumb.main(TestDriver.CAMPAIGN_PATH)
# FIXME add checks
def test_04_report(self):
self.assertIsNotNone(TestDriver.CAMPAIGN_PATH)
with capture_stdout() as stdout:
bendoc.main(TestDriver.CAMPAIGN_PATH)
content = stdout.getvalue()
self.assertTrue(content)
@unittest.skipIf(
'UT_SKIP_ELASTICSEARCH' in os.environ, 'manually disabled from environment'
)
def test_05_es_dump(self):
# Push documents to Elasticsearch
argv = [TestDriver.CAMPAIGN_PATH]
if 'UT_ELASTICSEARCH_HOST' in os.environ:
argv += ['--es', os.environ['UT_ELASTICSEARCH_HOST']]
exporter = benelastic.main(TestDriver.CAMPAIGN_PATH)
# Ensure they are searchable
exporter.index_client.refresh(exporter.index_name)
# Expect 3 documents in the index dedicated to the campaign
resp = exporter.es_client.count(index=exporter.index_name)
self.assertEqual(resp['count'], 3)
if 'UT_KEEP_ELASTICSEARCH_INDEX' not in os.environ:
# Cleanup
exporter.remove_index()
class TestFakeBenchmark(AbstractBenchmarkTest, unittest.TestCase):
exposed_benchmark = False
def get_benchmark_clazz(self):
return FakeBenchmark
def get_expected_metrics(self, category):
return dict(
performance=10.0,
standard_error=1.0,
pairs=[dict(first=1.5, second=True), dict(first=3.0, second=False)],
)
def get_benchmark_categories(self):
return ['main']
class TestHostDriver(unittest.TestCase):
CAMPAIGN = dict(
network=dict(
nodes=['node{0:02}'.format(id_) for id_ in range(1, 11)],
tags=reduce(
lambda x, y: dict(x, **y),
(
dict(
('n{0:02}'.format(id_), dict(nodes=['node{0:02}'.format(id_)]))
for id_ in range(1, 11)
),
dict(
group_nodes=[
dict(nodes=["node01", "node02"]),
dict(nodes=["node03"]),
],
group_match=dict(match="node1.*"),
group_rectags=dict(tags=["group_match", "group_nodes"]),
group_localhost=[dict(nodes=["localhost"])],
),
),
),
)
)
@classmethod
def setUpClass(cls):
cls.TEST_DIR = tempfile.mkdtemp(prefix='hpcbench-ut')
cls.CAMPAIGN_FILE = osp.join(cls.TEST_DIR, 'campaign.yaml')
with open(cls.CAMPAIGN_FILE, 'w') as ostr:
yaml.dump(cls.CAMPAIGN, ostr, default_flow_style=False)
cls.DRIVER = CampaignDriver(cls.CAMPAIGN_FILE)
def host_driver(self, node):
return HostDriver(CampaignDriver(TestHostDriver.CAMPAIGN_FILE, node=node), node)
def test_host_driver_children(self):
self.assertEqual(
self.host_driver('node01').children,
{'*', 'n01', 'group_nodes', 'group_rectags', 'group_localhost'},
)
self.assertEqual(
self.host_driver('node10').children,
{'*', 'n10', 'group_match', 'group_rectags', 'group_localhost'},
)
@unittest.skipIf(
'TRAVIS_TAG' in os.environ,
'objcopy version does not support --dump-section yet',
)
def test_buildinfo(self):
node = 'node01'
tag = '*'
campaign_file = TestHostDriver.CAMPAIGN_FILE
with mkdtemp() as test_dir, pushd(test_dir):
bench = BuildInfoBench()
BenchmarkCategoryDriver(
BenchmarkDriver(
BenchmarkTagDriver(
HostDriver(CampaignDriver(campaign_file, node=node), node), tag
),
bench,
FakeBenchmark.DEFAULT_BENCHMARK_NAME,
dict(),
),
'main',
)()
metas = bench.execution_matrix(None)[0]['metas']
build_info = metas.get('build_info')
self.assertEqual(build_info, bench.build_info)
def slurm(self, **kwargs):
node = kwargs.get('node', 'node01')
tag = kwargs.get('tag', 'group_nodes')
srun_nodes = kwargs.get('srun_nodes', 1)
benchmark_config = kwargs.get('benchmark_config')
srun = benchmark_config.get('srun') if benchmark_config else None
command = Command(execution=dict(command=['ls', '-la']), srun=srun)
campaign_file = TestHostDriver.CAMPAIGN_FILE
if srun_nodes is not None:
command.execution.update(srun_nodes=srun_nodes)
return SrunExecutionDriver(
FixedAttempts(
BenchmarkCategoryDriver(
BenchmarkDriver(
BenchmarkTagDriver(
HostDriver(CampaignDriver(campaign_file, node=node), node),
tag,
),
namedtuple('benchmark', ['name'])(name='benchmark'),
FakeBenchmark.DEFAULT_BENCHMARK_NAME,
benchmark_config or dict(),
),
'category',
),
command,
)
)
def test_slurm_constraint(self):
"""SLURM --constraint option disables node name resolution"""
slurm = self.slurm(benchmark_config=dict(srun=dict(constraint="uc1*6|uc2*6")))
os.environ['SRUN'] = 'true' # otherwise `find_executable` crashes
six.assertCountEqual(
self, slurm.command, ['true', "--constraint='uc1*6|uc2*6'", 'ls', '-la']
)
os.environ.pop('SRUN')
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.TEST_DIR)
@cached_property
def network(self):
return TestHostDriver.DRIVER.network
def test_srun_nodes_method(self):
self.assertEqual(
self.slurm(node='node03', srun_nodes=0).srun_nodes,
['node01', 'node02', 'node03'],
)
self.assertEqual(self.slurm(node='node01', srun_nodes=1).srun_nodes, ['node01'])
self.assertEqual(self.slurm(node='node02', srun_nodes=1).srun_nodes, ['node02'])
self.assertEqual(
self.slurm(node='node01', srun_nodes=2).srun_nodes, ['node01', 'node02']
)
self.assertEqual(
self.slurm(node='node02', srun_nodes=2).srun_nodes, ['node02', 'node03']
)
self.assertEqual(
self.slurm(node='node03', srun_nodes=2).srun_nodes, ['node03', 'node01']
)
self.assertEqual(
self.slurm(node='node03', srun_nodes='group_match').srun_nodes, ['node10']
)
self.assertEqual(
self.slurm(srun_nodes='*').srun_nodes,
['node{0:02}'.format(id_) for id_ in range(1, 11)],
)
def test_srun_nodes_method_errors(self):
negative_srun_nodes = self.slurm(node='node03', srun_nodes=-1)
with self.assertRaises(AssertionError):
self.assertIsNotNone(negative_srun_nodes.srun_nodes)
host_not_in_tag = self.slurm(node='node04')
with self.assertRaises(ValueError):
self.assertIsNotNone(host_not_in_tag.srun_nodes)
unknown_tag = self.slurm(srun_nodes='unknown_tag')
with self.assertRaises(ValueError):
self.assertIsNotNone(unknown_tag.srun_nodes)
too_many_nodes = self.slurm(srun_nodes=4)
with self.assertRaises(AssertionError):
self.assertIsNotNone(too_many_nodes.srun_nodes)
def test_nodes_method(self):
self.assertEqual(
self.network.nodes('group_nodes'), ['node01', 'node02', 'node03']
)
self.assertEqual(self.network.nodes('group_match'), ['node10'])
self.assertEqual(self.network.nodes('n01'), ['node01'])
self.assertEqual(
self.network.nodes('*'), ['node{0:02}'.format(id_) for id_ in range(1, 11)]
)
self.assertEqual(self.network.nodes('unknown_group'), [])
| mit | -7,642,928,388,993,798,000 | 36.079422 | 88 | 0.583974 | false |
ryfeus/lambda-packs | Keras_tensorflow_nightly/source2.7/tensorflow/contrib/distribute/python/prefetching_ops_v2.py | 2 | 6738 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Extension of prefetching_ops to support more than one device."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import warnings
from tensorflow.contrib.data.python.ops import contrib_op_loader # pylint: disable=unused-import
from tensorflow.contrib.data.python.ops import gen_dataset_ops
from tensorflow.contrib.data.python.ops import prefetching_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.data.util import nest as data_nest
from tensorflow.python.data.util import sparse
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.util import nest
# pylint: disable=protected-access
class _PrefetchToDeviceIterator(object):
"""A replacement for @{tf.data.Iterator} that prefetches to another device."""
def __init__(self, input_dataset, devices, buffer_size):
self._input_dataset = input_dataset
self._get_next_call_count = 0
self._devices = devices
input_iterator = input_dataset.make_one_shot_iterator()
input_iterator_handle = input_iterator.string_handle()
@function.Defun(dtypes.string)
def _prefetch_fn(handle):
"""Prefetches one element from `input_iterator`."""
remote_iterator = iterator_ops.Iterator.from_string_handle(
handle, input_iterator.output_types, input_iterator.output_shapes,
input_iterator.output_classes)
ret = remote_iterator.get_next()
return nest.flatten(sparse.serialize_sparse_tensors(ret))
target_device = gen_dataset_ops.iterator_get_device(
input_iterator._iterator_resource)
self._buffering_resources = []
for device in nest.flatten(self._devices):
with ops.device(device):
buffer_resource_handle = prefetching_ops.function_buffering_resource(
f=_prefetch_fn,
target_device=target_device,
string_arg=input_iterator_handle,
buffer_size=buffer_size)
self._buffering_resources.append(buffer_resource_handle)
def get_next(self, name=None):
"""See @{tf.data.Iterator.get_next}."""
self._get_next_call_count += 1
if self._get_next_call_count > iterator_ops.GET_NEXT_CALL_WARNING_THRESHOLD:
warnings.warn(iterator_ops.GET_NEXT_CALL_WARNING_MESSAGE)
flat_result = []
# TODO(priyag): This will fail if the input size (typically number of
# batches) is not divisible by number of devices.
# How do we handle that more gracefully / let the user know?
for buffer_resource in self._buffering_resources:
flat_ret = gen_dataset_ops.function_buffering_resource_get_next(
buffer_resource,
output_types=data_nest.flatten(sparse.as_dense_types(
self.output_types, self.output_classes)), name=name)
ret = sparse.deserialize_sparse_tensors(
data_nest.pack_sequence_as(self.output_types, flat_ret),
self.output_types, self.output_shapes, self.output_classes)
for tensor, shape in zip(
data_nest.flatten(ret), data_nest.flatten(self.output_shapes)):
if isinstance(tensor, ops.Tensor):
tensor.set_shape(shape)
flat_result.append(ret)
return nest.pack_sequence_as(self._devices, flat_result)
@property
def output_classes(self):
return self._input_dataset.output_classes
@property
def output_shapes(self):
return self._input_dataset.output_shapes
@property
def output_types(self):
return self._input_dataset.output_types
# pylint: enable=protected-access
class _PrefetchToDeviceDataset(dataset_ops.Dataset):
"""A `Dataset` whose iterator prefetches elements to other device(s)."""
def __init__(self, input_dataset, devices, buffer_size):
self._input_dataset = input_dataset
self._devices = devices
self._buffer_size = buffer_size if buffer_size is not None else 1
def make_one_shot_iterator(self):
return _PrefetchToDeviceIterator(self._input_dataset, self._devices,
self._buffer_size)
def make_initializable_iterator(self, shared_name=None):
raise NotImplementedError("`prefetch_to_devices()` is not currently "
"compatible with initializable iterators. Use "
"`make_one_shot_iterator()` instead.")
def _as_variant_tensor(self):
# TODO(mrry): Raise this error earlier (e.g. when one of the Dataset
    # transformation methods is called).
# TODO(mrry): Investigate support for chaining further transformations after
# the prefetch, including GPU support.
raise NotImplementedError("`prefetch_to_devices()` must be the last "
"transformation in a dataset pipeline.")
# TODO(priyag): Fix the output types, shapes and classes to match the result
# of get_next (which has the additional nesting layer of devices now).
@property
def output_types(self):
return self._input_dataset.output_types
@property
def output_shapes(self):
return self._input_dataset.output_shapes
@property
def output_classes(self):
return self._input_dataset.output_classes
def prefetch_to_devices(devices, buffer_size=None):
"""A transformation that prefetches dataset values to the given `devices`.
NOTE: Although the transformation creates a @{tf.data.Dataset}, the
transformation must be the final `Dataset` in the input pipeline.
Args:
devices: A nested structure of devices on which to prefetch the data. It can
be a single device name, or a tuple or list of device names.
buffer_size: (Optional.) The number of elements to buffer on each device.
Defaults to an automatically chosen value.
Returns:
A `Dataset` transformation function, which can be passed to
@{tf.data.Dataset.apply}.
"""
def _apply_fn(dataset):
return _PrefetchToDeviceDataset(dataset, devices, buffer_size)
return _apply_fn
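# Illustrative usage sketch (not part of the original module). It assumes a
# TF 1.x-era environment where this file is importable as `prefetching_ops_v2`
# and that the listed devices exist; the device names and toy dataset below
# are hypothetical.
#
#   import tensorflow as tf
#   from tensorflow.contrib.distribute.python import prefetching_ops_v2
#
#   dataset = tf.data.Dataset.range(8).batch(2)
#   dataset = dataset.apply(prefetching_ops_v2.prefetch_to_devices(
#       ("/gpu:0", "/gpu:1"), buffer_size=1))
#   iterator = dataset.make_one_shot_iterator()
#   per_device_batches = iterator.get_next()  # nested like the devices tuple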
| mit | 4,795,807,771,898,680,000 | 39.107143 | 97 | 0.702137 | false |
Alpistinho/FreeCAD | src/Mod/PartDesign/fcgear/fcgear.py | 18 | 3528 | # (c) 2014 David Douard <david.douard@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (LGPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# for detail see the LICENCE text file.
#
# FCGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with FCGear; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
from math import cos, sin, pi, acos, asin, atan, sqrt
import FreeCAD, FreeCADGui, Part
from FreeCAD import Base, Console
import involute
reload(involute)
rotate = involute.rotate
def makeGear(m, Z, angle, split=True):
if FreeCAD.ActiveDocument is None:
FreeCAD.newDocument("Gear")
doc = FreeCAD.ActiveDocument
w = FCWireBuilder()
involute.CreateExternalGear(w, m, Z, angle, split)
gearw = Part.Wire([o.toShape() for o in w.wire])
gear = doc.addObject("Part::Feature", "Gear")
gear.Shape = gearw
return gear
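# Illustrative usage sketch (not part of the original module). It assumes a
# running FreeCAD session in which this module is importable as `fcgear`; the
# module/teeth/pressure-angle values below are arbitrary examples.
#
#   import fcgear
#   gear = fcgear.makeGear(m=2.0, Z=26, angle=20, split=True)
#   # creates (or reuses) the active document and adds a "Gear" Part::Feature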
class FCWireBuilder(object):
"""A helper class to prepare a Part.Wire object"""
def __init__(self):
self.pos = None
self.theta = 0.0
self.wire = []
def move(self, p):
"""set current position"""
self.pos = Base.Vector(*p)
def line(self, p):
"""Add a segment between self.pos and p"""
p = rotate(p, self.theta)
end = Base.Vector(*p)
self.wire.append(Part.Line(self.pos, end))
self.pos = end
def arc(self, p, r, sweep):
""""Add an arc from self.pos to p which radius is r
sweep (0 or 1) determine the orientation of the arc
"""
p = rotate(p, self.theta)
end = Base.Vector(*p)
mid = Base.Vector(*(midpoints(p, self.pos, r)[sweep]))
self.wire.append(Part.Arc(self.pos, mid, end))
self.pos = end
def curve(self, *points):
"""Add a Bezier curve from self.pos to points[-1]
        all other points are the control points of the Bezier curve (which
        will thus be of degree len(points))
"""
points = [Base.Vector(*rotate(p, self.theta)) for p in points]
bz = Part.BezierCurve()
bz.setPoles([self.pos] + points)
self.wire.append(bz)
self.pos = points[-1]
def close(self):
pass
def midpoints(p1, p2, r):
"""A very ugly function that returns the midpoint of a p1 and p2
on the circle which radius is r and which pass throught p1 and
p2
Return the 2 possible solutions
"""
vx, vy = p2[0]-p1[0], p2[1]-p1[1]
b = (vx**2 + vy**2)**.5
v = (vx/b, vy/b)
cosA = b**2 / (2*b*r)
A = acos(cosA)
vx, vy = rotate(v, A)
c1 = (p1[0]+r*vx, p1[1]+r*vy)
m1x, m1y = ((p1[0]+p2[0])/2 - c1[0], (p1[1]+p2[1])/2 - c1[1])
dm1 = (m1x**2+m1y**2)**.5
m1x, m1y = (c1[0] + r*m1x/dm1, c1[1] + r*m1y/dm1)
m1 = (m1x, m1y)
vx, vy = rotate(v, -A)
c2 = (p1[0]+r*vx, p1[1]+r*vy)
m2x, m2y = ((p1[0]+p2[0])/2 - c2[0], (p1[1]+p2[1])/2 - c2[1])
dm2 = (m2x**2+m2y**2)**.5
m2x, m2y = (c2[0] + r*m2x/dm2, c2[1] + r*m2y/dm2)
m2 = (m2x, m2y)
return m1, m2
| lgpl-2.1 | -3,244,727,046,546,419,700 | 31.666667 | 76 | 0.603741 | false |
kamotos/factory_boy | tests/test_mongoengine.py | 1 | 2393 | # -*- coding: utf-8 -*-
# Copyright: See the LICENSE file.
"""Tests for factory_boy/MongoEngine interactions."""
import factory
import os
from .compat import unittest
try:
import mongoengine
except ImportError:
mongoengine = None
if os.environ.get('SKIP_MONGOENGINE') == '1':
mongoengine = None
if mongoengine:
from factory.mongoengine import MongoEngineFactory
class Address(mongoengine.EmbeddedDocument):
street = mongoengine.StringField()
class Person(mongoengine.Document):
name = mongoengine.StringField()
address = mongoengine.EmbeddedDocumentField(Address)
class AddressFactory(MongoEngineFactory):
class Meta:
model = Address
street = factory.Sequence(lambda n: 'street%d' % n)
class PersonFactory(MongoEngineFactory):
class Meta:
model = Person
name = factory.Sequence(lambda n: 'name%d' % n)
address = factory.SubFactory(AddressFactory)
@unittest.skipIf(mongoengine is None, "mongoengine not installed.")
class MongoEngineTestCase(unittest.TestCase):
db_name = os.environ.get('MONGO_DATABASE', 'factory_boy_test')
db_host = os.environ.get('MONGO_HOST', 'localhost')
db_port = int(os.environ.get('MONGO_PORT', '27017'))
server_timeout_ms = int(os.environ.get('MONGO_TIMEOUT', '300'))
@classmethod
def setUpClass(cls):
from pymongo import read_preferences as mongo_rp
cls.db = mongoengine.connect(
db=cls.db_name,
host=cls.db_host,
port=cls.db_port,
# PyMongo>=2.1 requires an explicit read_preference.
read_preference=mongo_rp.ReadPreference.PRIMARY,
# PyMongo>=2.1 has a 20s timeout, use 100ms instead
serverselectiontimeoutms=cls.server_timeout_ms,
)
@classmethod
def tearDownClass(cls):
cls.db.drop_database(cls.db_name)
def setUp(self):
mongoengine.connect('factory_boy_test')
def test_build(self):
std = PersonFactory.build()
self.assertEqual('name0', std.name)
self.assertEqual('street0', std.address.street)
self.assertIsNone(std.id)
def test_creation(self):
std1 = PersonFactory.create()
self.assertEqual('name1', std1.name)
self.assertEqual('street1', std1.address.street)
self.assertIsNotNone(std1.id)
| mit | -5,061,643,175,586,691,000 | 28.54321 | 67 | 0.65608 | false |
cathalmccabe/PYNQ | pynq/lib/pmod/pmod.py | 4 | 5821 | # Copyright (c) 2016, Xilinx, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import asyncio
import os
import sys
import math
from pynq.lib import PynqMicroblaze
from pynq.lib.pynqmicroblaze import add_bsp
from . import MAILBOX_OFFSET
from . import MAILBOX_PY2IOP_CMD_OFFSET
from . import BIN_LOCATION
from . import BSP_LOCATION
__author__ = "Yun Rock Qu"
__copyright__ = "Copyright 2016, Xilinx"
__email__ = "yunq@xilinx.com"
class Pmod(PynqMicroblaze):
"""This class controls the Pmod Microblaze instances in the system.
This class inherits from the PynqMicroblaze class. It extends
PynqMicroblaze with capability to control Pmod devices.
Attributes
----------
ip_name : str
The name of the IP corresponding to the Microblaze.
rst_name : str
The name of the reset pin for the Microblaze.
mb_program : str
The absolute path of the Microblaze program.
state : str
The status (IDLE, RUNNING, or STOPPED) of the Microblaze.
reset_pin : GPIO
The reset pin associated with the Microblaze.
mmio : MMIO
The MMIO instance associated with the Microblaze.
interrupt : Event
An asyncio.Event-like class for waiting on and clearing interrupts.
"""
def __init__(self, mb_info, mb_program):
"""Create a new Microblaze object.
This method leverages the initialization method of its parent. It
also deals with relative / absolute path of the program.
Parameters
----------
mb_info : dict
A dictionary storing Microblaze information, such as the
IP name and the reset name.
mb_program : str
The Microblaze program loaded for the processor.
Examples
--------
The `mb_info` is a dictionary storing Microblaze information:
>>> mb_info = {'ip_name': 'mb_bram_ctrl_1',
'rst_name': 'mb_reset_1',
'intr_pin_name': 'iop1/dff_en_reset_0/q',
'intr_ack_name': 'mb_1_intr_ack'}
"""
if not os.path.isabs(mb_program):
mb_program = os.path.join(BIN_LOCATION, mb_program)
super().__init__(mb_info, mb_program)
def write_mailbox(self, data_offset, data):
"""This method write data into the mailbox of the Microblaze.
Parameters
----------
data_offset : int
The offset for mailbox data, 0,4,... for MAILBOX 0,1,...
data : int/list
A list of 32b words to be written into the mailbox.
Returns
-------
None
"""
offset = MAILBOX_OFFSET + data_offset
self.write(offset, data)
def read_mailbox(self, data_offset, num_words=1):
"""This method reads mailbox data from the Microblaze.
Parameters
----------
data_offset : int
The offset for mailbox data, 0,4,... for MAILBOX 0,1,...
num_words : int
Number of 32b words to read from Microblaze mailbox.
Returns
-------
int/list
            An int or a list of data read from the mailbox.
"""
offset = MAILBOX_OFFSET + data_offset
return self.read(offset, num_words)
def write_blocking_command(self, command):
"""This method writes a blocking command to the Microblaze.
The program waits in the loop until the command is cleared by the
Microblaze.
Parameters
----------
command : int
The command to write to the Microblaze.
Returns
-------
None
"""
self.write(MAILBOX_OFFSET + MAILBOX_PY2IOP_CMD_OFFSET, command)
while self.read(MAILBOX_OFFSET + MAILBOX_PY2IOP_CMD_OFFSET) != 0:
pass
def write_non_blocking_command(self, command):
"""This method writes a non-blocking command to the Microblaze.
        The program will just send the command and return control
        immediately.
Parameters
----------
command : int
The command to write to the Microblaze.
Returns
-------
None
"""
self.write(MAILBOX_OFFSET + MAILBOX_PY2IOP_CMD_OFFSET, command)
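# Illustrative usage sketch (not part of the original module). It assumes a
# PYNQ board with an overlay loaded whose `mb_info` entries and Microblaze
# binary match the deployed bitstream; the names below are hypothetical.
#
#   mb_info = {'ip_name': 'iop1/mb_bram_ctrl', 'rst_name': 'mb_reset_1'}
#   pmod = Pmod(mb_info, 'pmod_mailbox.bin')
#   pmod.write_mailbox(0, [0x1234, 0x5678])   # fill MAILBOX 0 and 1
#   pmod.write_blocking_command(0x1)          # wait until firmware clears it
#   result = pmod.read_mailbox(0, num_words=2)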
if os.path.exists(BSP_LOCATION):
add_bsp(BSP_LOCATION)
| bsd-3-clause | 6,246,514,693,235,974,000 | 31.887006 | 79 | 0.640096 | false |
lmazuel/azure-sdk-for-python | azure-mgmt-resource/azure/mgmt/resource/resources/v2017_05_10/models/sku.py | 4 | 1528 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Sku(Model):
"""SKU for the resource.
:param name: The SKU name.
:type name: str
:param tier: The SKU tier.
:type tier: str
:param size: The SKU size.
:type size: str
:param family: The SKU family.
:type family: str
:param model: The SKU model.
:type model: str
:param capacity: The SKU capacity.
:type capacity: int
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
'size': {'key': 'size', 'type': 'str'},
'family': {'key': 'family', 'type': 'str'},
'model': {'key': 'model', 'type': 'str'},
'capacity': {'key': 'capacity', 'type': 'int'},
}
def __init__(self, name=None, tier=None, size=None, family=None, model=None, capacity=None):
super(Sku, self).__init__()
self.name = name
self.tier = tier
self.size = size
self.family = family
self.model = model
self.capacity = capacity
| mit | -6,502,103,743,945,068,000 | 30.833333 | 96 | 0.532068 | false |
hejuna/bite-project | server/models/bite_suite.py | 17 | 10446 | # Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite suite model.
The Bite Suite model is one of the core BITE models. Users can add
a list of tests to a suite, and then configure the suite in various
aspects like watchdog jobs, reports, dimensions, retry logic, etc.
"""
__author__ = 'phu@google.com (Po Hu)'
import datetime
import logging
from google.appengine.ext import db
from models import bite_event
from models import bite_project
from utils import basic_util
DEFAULT_SUITE_TIMEOUT = 9999
DEFAULT_AUTO_DELETE_DEADLINE = 9999
class Error(Exception):
pass
class MissingSuiteNameError(Error):
"""Misses the suite name."""
class DuplicatedSuiteNameError(Error):
"""Has the suite name existing under project."""
class MissingProjectError(Error):
"""Misses the project name."""
class BiteSuite(db.Model):
"""Contains a group of tests as well as configurations."""
name = db.StringProperty(required=True)
visible = db.BooleanProperty(required=True, default=True)
description = db.TextProperty(required=False)
# project = db.ReferenceProperty(bite_project.BiteProject)
labels = db.StringListProperty(default=None)
created_by = db.UserProperty(required=False, auto_current_user_add=True)
created_time = db.DateTimeProperty(required=False, auto_now_add=True)
configs = db.TextProperty(required=False) # Used to override test's config.
# Either interval or concrete time.
watchdog_setting = db.StringProperty(required=False)
latest_version_url = db.LinkProperty(required=False)
# Includes sender, recipient, pass rate, etc info.
report_setting = db.TextProperty(required=False)
retry_times = db.IntegerProperty(required=False)
default_timeout = db.IntegerProperty(required=False)
# In how long should the job be auto deleted if not executed.
auto_delete_deadline = db.IntegerProperty(required=False)
reminder_setting = db.TextProperty(required=False)
last_modified_time = db.DateTimeProperty(required=False, auto_now=True)
last_modified_by = db.UserProperty(required=False, auto_current_user=True)
tests_number = db.IntegerProperty(required=False)
test_source = db.StringProperty(required=False,
                                  choices=('acc',))
test_src_dict = db.TextProperty(required=False)
def ParseWatchdogSetting(watchdog_setting):
"""Parses the watchdog settings and returns the interval in mins."""
watchdog_setting_obj = basic_util.ParseJsonStr(watchdog_setting)
if (isinstance(watchdog_setting_obj, dict) and
watchdog_setting_obj.has_key('every')):
return int(watchdog_setting_obj['every'])
else:
return 0
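# Illustrative example (not part of the original module): the watchdog setting
# is stored as a JSON object whose "every" key holds the interval in minutes,
# so a round trip through the helpers in this module looks roughly like this.
#
#   setting_str = GetSuiteWatchdogStr({}, 30)   # '{"every": 30}'
#   ParseWatchdogSetting(setting_str)           # -> 30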
def LoadSuite(name, project_name, suite_key_str=''):
"""Loads a bite suite."""
if suite_key_str:
return BiteSuite.get(db.Key(suite_key_str))
if not name:
raise MissingSuiteNameError('There is no suite name defined.')
if not project_name:
raise MissingProjectError('No project name was given.')
suite_key = db.Key.from_path(bite_project.BiteProject.kind(),
project_name, 'BiteSuite', name)
return BiteSuite.get(suite_key)
def LoadAllSuitesOfProjects(project_names=None):
"""Loads all the suites of the given projects."""
if project_names is None:
projects = bite_project.GetAllProjects()
project_names = [project.name for project in projects]
suites = []
for project_name in project_names:
suites.extend(LoadAllSuitesOfProject(project_name))
return suites
def LoadAllSuitesOfProject(project_name):
"""Loads all of the suites of a project."""
project_key = db.Key.from_path(bite_project.BiteProject.kind(),
project_name)
return BiteSuite.all().ancestor(project_key)
def GetSuiteWatchdogStr(watchdog_setting, interval):
"""Creates a watchdog setting string to save."""
if not watchdog_setting:
watchdog_setting = {}
if interval:
watchdog_setting['every'] = interval
return basic_util.DumpJsonStr(watchdog_setting)
def GetSuiteConfigStr(configs, tokens, start_url=''):
"""Creates a suite config str."""
if not configs:
configs = {}
if isinstance(configs, str):
configs = basic_util.ParseJsonStr(configs)['configs']
configs['tokens'] = tokens
configs['start_url'] = start_url
return basic_util.DumpJsonStr({'configs': configs})
def GetSuiteReportStr(report, email_from, email_to, failure_thresh):
"""Creates a suite report str."""
if not report:
report = {}
if isinstance(report, str):
report = basic_util.ParseJsonStr(report)['report']
report['email_from'] = email_from
report['email_to'] = email_to
report['failure_thresh'] = failure_thresh
return basic_util.DumpJsonStr({'report': report})
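# Illustrative example (not part of the original module): the report setting
# is stored as a JSON object under a top-level "report" key.
#
#   report_str = GetSuiteReportStr({}, 'bot@example.com', 'team@example.com', 90)
#   ParseReportStr(report_str)
#   # -> {'email_from': 'bot@example.com', 'email_to': 'team@example.com',
#   #     'failure_thresh': 90}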
def ParseReportStr(report_str):
"""Parses the report string.
The report string in Json format should be:
{'report': {'name1': 'value1',
...}}
Args:
report_str: A Json format report string.
Returns:
A dict of report info.
"""
if not report_str:
return {}
report_obj = basic_util.ParseJsonStr(report_str)
return report_obj['report']
def ParseConfigStr(configs_str):
"""Parses the configs string.
The configs string in Json format should be:
{'configs': {'name1': 'value1',
...}}
Args:
configs_str: A Json format configs string.
Returns:
A dict of configs.
"""
if not configs_str:
return {}
configs_obj = basic_util.ParseJsonStr(configs_str)
return configs_obj['configs']
def GetSuiteTokens(suite):
"""Gets the tokens of the given suite."""
try:
return GetSuiteAttribute(suite, 'configs', 'tokens')
except:
return ''
def GetStartUrl(suite):
"""Gets the start url of the given suite."""
return GetSuiteAttribute(suite, 'configs', 'start_url')
def GetSuiteAttribute(suite, prop, attr):
"""Gets the attribute of the given suite."""
if isinstance(suite, unicode) or isinstance(suite, str):
suite = BiteSuite.get(db.Key(suite))
prop_obj = {}
if prop == 'configs':
prop_obj = ParseConfigStr(str(suite.configs))
elif prop == 'watchdog_setting':
return ParseWatchdogSetting(suite.watchdog_setting)
elif prop == 'report_setting':
prop_obj = ParseReportStr(str(suite.report_setting))
result = ''
if prop_obj.has_key(attr):
result = prop_obj[attr]
return result
def CheckSuiteExists(suite_name, project_name):
"""Checks if the suite exists or not."""
suite_key = db.Key.from_path(bite_project.BiteProject.kind(),
project_name, 'BiteSuite', suite_name)
if BiteSuite.get(suite_key):
return True
else:
return False
def UpdateSuite(name, project_name, description='', labels=None, configs='',
watchdog_setting='', latest_version_url=None, report_setting='',
retry_times=0, default_timeout=DEFAULT_SUITE_TIMEOUT,
auto_delete_deadline=DEFAULT_AUTO_DELETE_DEADLINE,
reminder_setting='', tests_num=0,
test_source='', test_src_dict=''):
"""Updates the given suite."""
suite_key = db.Key.from_path(bite_project.BiteProject.kind(),
project_name, 'BiteSuite', name)
suite = BiteSuite.get(suite_key)
suite.name = name
suite.description = description
suite.labels = labels
suite.configs = configs
suite.watchdog_setting = watchdog_setting
suite.latest_version_url = latest_version_url
suite.report_setting = report_setting
suite.retry_times = retry_times
suite.default_timeout = default_timeout
suite.auto_delete_deadline = auto_delete_deadline
suite.reminder_setting = reminder_setting
suite.tests_number = tests_num
suite.test_source = test_source
suite.test_src_dict = test_src_dict
suite.put()
bite_event.AddEvent(suite, action='modify', event_type='set',
name=suite.name, labels=suite.labels,
project=suite.parent().name)
return suite
def AddSuite(name, project_name, description='', labels=None, configs='',
watchdog_setting='', latest_version_url=None, report_setting='',
retry_times=0, default_timeout=DEFAULT_SUITE_TIMEOUT,
auto_delete_deadline=DEFAULT_AUTO_DELETE_DEADLINE,
reminder_setting='', tests_num=0,
test_source='', test_src_dict=''):
"""Adds a bite suite."""
if not name:
raise MissingSuiteNameError('There is no suite name defined.')
if not project_name:
raise MissingProjectError('No project name was given.')
project_key = db.Key.from_path(bite_project.BiteProject.kind(),
project_name)
if CheckSuiteExists(name, project_name):
raise DuplicatedSuiteNameError('Duplicated suite name.')
# Assume name is ascii.
suite = BiteSuite.get_or_insert(str(name),
parent=project_key,
name=name,
description=description,
labels=labels or [],
configs=configs,
watchdog_setting=watchdog_setting,
latest_version_url=latest_version_url,
report_setting=report_setting,
retry_times=retry_times,
default_timeout=default_timeout,
auto_delete_deadline=auto_delete_deadline,
reminder_setting=reminder_setting,
tests_number=tests_num,
test_source=test_source,
test_src_dict=test_src_dict)
bite_event.AddEvent(suite, action='create', event_type='set',
name=suite.name, labels=suite.labels,
project=suite.parent().name)
return suite
| apache-2.0 | -4,043,245,308,285,785,000 | 33.361842 | 80 | 0.663508 | false |
codyparker/channels-obstruction | game/views/views.py | 1 | 3489 | from django.contrib.auth.forms import PasswordResetForm
from django.shortcuts import redirect
from django.views.generic import CreateView, TemplateView, View, FormView
from django.contrib.auth import authenticate, login
from game.forms import *
from game.models import User, Game
from django.contrib.auth.forms import AuthenticationForm, UserCreationForm
from django.contrib.auth import REDIRECT_FIELD_NAME, login as auth_login, logout as auth_logout
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.contrib import messages
import json
from django.contrib.auth import get_user
from django.shortcuts import get_object_or_404
class HomeView(TemplateView):
template_name = 'home.html'
def dispatch(self, request, *args, **kwargs):
# if logged in, send them to the lobby
if request.user.is_authenticated:
return redirect('/lobby/')
        return super(HomeView, self).dispatch(request, *args, **kwargs)
class CreateUserView(CreateView):
template_name = 'register.html'
form_class = UserCreationForm
success_url = '/lobby/'
def form_valid(self, form):
valid = super(CreateUserView, self).form_valid(form)
username, password = form.cleaned_data.get('username'), form.cleaned_data.get('password1')
new_user = authenticate(username=username, password=password)
login(self.request, new_user)
return valid
class LobbyView(TemplateView):
template_name = 'components/lobby/lobby.html'
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
return super(LobbyView, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(LobbyView, self).get_context_data(**kwargs)
# get current open games to prepopulate the list
# we're creating a list of games that contains just the id (for the link) and the creator
available_games = [{'creator': game.creator.username, 'id': game.pk} for game in Game.get_available_games()]
# for the player's games, we're returning a list of games with the opponent and id
player_games = Game.get_games_for_player(self.request.user)
        context['available_games'] = available_games
        context['player_games'] = player_games
        return context
class GameView(TemplateView):
template_name = 'components/game/game.html'
game = None
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
# get the game by the id
self.game = Game.get_by_id(kwargs['game_id'])
user = get_user(request)
# check to see if the game is open and available for this user
# if this player is the creator, just return
if self.game.creator == user or self.game.opponent == user:
return super(GameView, self).dispatch(request, *args, **kwargs)
# if there is no opponent and the game is not yet completed,
# set the opponent as this user
if not self.game.opponent and not self.game.completed:
self.game.opponent = user
self.game.save()
return super(GameView, self).dispatch(request, *args, **kwargs)
else:
messages.add_message(request, messages.ERROR, 'Sorry, the selected game is not available.')
return redirect('/lobby/')
def get_context_data(self, **kwargs):
context = super(GameView, self).get_context_data(**kwargs)
context['game'] = self.game
return context | mit | 8,649,726,474,509,742,000 | 39.114943 | 116 | 0.687876 | false |
vitorespindola/home-assistant | tests/test_component_device_sun_light_trigger.py | 3 | 3501 | """
tests.test_component_device_sun_light_trigger
~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests device sun light trigger component.
"""
# pylint: disable=too-many-public-methods,protected-access
import os
import unittest
import homeassistant.loader as loader
from homeassistant.const import CONF_PLATFORM
from homeassistant.components import (
device_tracker, light, sun, device_sun_light_trigger)
from helpers import (
get_test_home_assistant, ensure_sun_risen, ensure_sun_set,
trigger_device_tracker_scan)
KNOWN_DEV_PATH = None
def setUpModule(): # pylint: disable=invalid-name
""" Initalizes a Home Assistant server. """
global KNOWN_DEV_PATH
hass = get_test_home_assistant()
loader.prepare(hass)
KNOWN_DEV_PATH = hass.config.path(
device_tracker.KNOWN_DEVICES_FILE)
hass.stop()
with open(KNOWN_DEV_PATH, 'w') as fil:
fil.write('device,name,track,picture\n')
fil.write('DEV1,device 1,1,http://example.com/dev1.jpg\n')
fil.write('DEV2,device 2,1,http://example.com/dev2.jpg\n')
def tearDownModule(): # pylint: disable=invalid-name
""" Stops the Home Assistant server. """
os.remove(KNOWN_DEV_PATH)
class TestDeviceSunLightTrigger(unittest.TestCase):
""" Test the device sun light trigger module. """
def setUp(self): # pylint: disable=invalid-name
self.hass = get_test_home_assistant()
self.scanner = loader.get_component(
'device_tracker.test').get_scanner(None, None)
self.scanner.reset()
self.scanner.come_home('DEV1')
loader.get_component('light.test').init()
device_tracker.setup(self.hass, {
device_tracker.DOMAIN: {CONF_PLATFORM: 'test'}
})
light.setup(self.hass, {
light.DOMAIN: {CONF_PLATFORM: 'test'}
})
sun.setup(self.hass, {sun.DOMAIN: {sun.CONF_ELEVATION: 0}})
def tearDown(self): # pylint: disable=invalid-name
""" Stop down stuff we started. """
self.hass.stop()
def test_lights_on_when_sun_sets(self):
""" Test lights go on when there is someone home and the sun sets. """
device_sun_light_trigger.setup(
self.hass, {device_sun_light_trigger.DOMAIN: {}})
ensure_sun_risen(self.hass)
light.turn_off(self.hass)
self.hass.pool.block_till_done()
ensure_sun_set(self.hass)
self.hass.pool.block_till_done()
self.assertTrue(light.is_on(self.hass))
def test_lights_turn_off_when_everyone_leaves(self):
""" Test lights turn off when everyone leaves the house. """
light.turn_on(self.hass)
self.hass.pool.block_till_done()
device_sun_light_trigger.setup(
self.hass, {device_sun_light_trigger.DOMAIN: {}})
self.scanner.leave_home('DEV1')
trigger_device_tracker_scan(self.hass)
self.hass.pool.block_till_done()
self.assertFalse(light.is_on(self.hass))
def test_lights_turn_on_when_coming_home_after_sun_set(self):
""" Test lights turn on when coming home after sun set. """
light.turn_off(self.hass)
ensure_sun_set(self.hass)
self.hass.pool.block_till_done()
device_sun_light_trigger.setup(
self.hass, {device_sun_light_trigger.DOMAIN: {}})
self.scanner.come_home('DEV2')
trigger_device_tracker_scan(self.hass)
self.hass.pool.block_till_done()
self.assertTrue(light.is_on(self.hass))
| mit | -6,426,477,691,603,118,000 | 26.566929 | 78 | 0.635533 | false |
DailyActie/Surrogate-Model | 01-codes/scipy-master/scipy/stats/tests/test_tukeylambda_stats.py | 1 | 3336 | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import assert_allclose, assert_equal, run_module_suite
from scipy.stats._tukeylambda_stats import tukeylambda_variance, \
tukeylambda_kurtosis
def test_tukeylambda_stats_known_exact():
"""Compare results with some known exact formulas."""
# Some exact values of the Tukey Lambda variance and kurtosis:
# lambda var kurtosis
# 0 pi**2/3 6/5 (logistic distribution)
# 0.5 4 - pi (5/3 - pi/2)/(pi/4 - 1)**2 - 3
# 1 1/3 -6/5 (uniform distribution on (-1,1))
# 2 1/12 -6/5 (uniform distribution on (-1/2, 1/2))
# lambda = 0
var = tukeylambda_variance(0)
assert_allclose(var, np.pi ** 2 / 3, atol=1e-12)
kurt = tukeylambda_kurtosis(0)
assert_allclose(kurt, 1.2, atol=1e-10)
# lambda = 0.5
var = tukeylambda_variance(0.5)
assert_allclose(var, 4 - np.pi, atol=1e-12)
kurt = tukeylambda_kurtosis(0.5)
desired = (5. / 3 - np.pi / 2) / (np.pi / 4 - 1) ** 2 - 3
assert_allclose(kurt, desired, atol=1e-10)
# lambda = 1
var = tukeylambda_variance(1)
assert_allclose(var, 1.0 / 3, atol=1e-12)
kurt = tukeylambda_kurtosis(1)
assert_allclose(kurt, -1.2, atol=1e-10)
# lambda = 2
var = tukeylambda_variance(2)
assert_allclose(var, 1.0 / 12, atol=1e-12)
kurt = tukeylambda_kurtosis(2)
assert_allclose(kurt, -1.2, atol=1e-10)
def test_tukeylambda_stats_mpmath():
"""Compare results with some values that were computed using mpmath."""
a10 = dict(atol=1e-10, rtol=0)
a12 = dict(atol=1e-12, rtol=0)
data = [
# lambda variance kurtosis
[-0.1, 4.78050217874253547, 3.78559520346454510],
[-0.0649, 4.16428023599895777, 2.52019675947435718],
[-0.05, 3.93672267890775277, 2.13129793057777277],
[-0.001, 3.30128380390964882, 1.21452460083542988],
[0.001, 3.27850775649572176, 1.18560634779287585],
[0.03125, 2.95927803254615800, 0.804487555161819980],
[0.05, 2.78281053405464501, 0.611604043886644327],
[0.0649, 2.65282386754100551, 0.476834119532774540],
[1.2, 0.242153920578588346, -1.23428047169049726],
[10.0, 0.00095237579757703597, 2.37810697355144933],
[20.0, 0.00012195121951131043, 7.37654321002709531],
]
for lam, var_expected, kurt_expected in data:
var = tukeylambda_variance(lam)
assert_allclose(var, var_expected, **a12)
kurt = tukeylambda_kurtosis(lam)
assert_allclose(kurt, kurt_expected, **a10)
# Test with vector arguments (most of the other tests are for single
# values).
lam, var_expected, kurt_expected = zip(*data)
var = tukeylambda_variance(lam)
assert_allclose(var, var_expected, **a12)
kurt = tukeylambda_kurtosis(lam)
assert_allclose(kurt, kurt_expected, **a10)
def test_tukeylambda_stats_invalid():
"""Test values of lambda outside the domains of the functions."""
lam = [-1.0, -0.5]
var = tukeylambda_variance(lam)
assert_equal(var, np.array([np.nan, np.inf]))
lam = [-1.0, -0.25]
kurt = tukeylambda_kurtosis(lam)
assert_equal(kurt, np.array([np.nan, np.inf]))
if __name__ == "__main__":
run_module_suite()
| mit | 7,781,818,567,026,962,000 | 36.066667 | 75 | 0.630995 | false |
semonte/intellij-community | python/helpers/pydev/_pydevd_frame_eval/pydevd_frame_tracing.py | 3 | 4322 | import sys
import traceback
from _pydev_bundle import pydev_log
from _pydev_imps._pydev_saved_modules import threading
from _pydevd_bundle.pydevd_comm import get_global_debugger, CMD_SET_BREAK
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER
def update_globals_dict(globals_dict):
new_globals = {'_pydev_stop_at_break': _pydev_stop_at_break}
globals_dict.update(new_globals)
def handle_breakpoint(frame, thread, global_debugger, breakpoint):
# ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint
new_frame = frame
condition = breakpoint.condition
info = thread.additional_info
if condition is not None:
try:
val = eval(condition, new_frame.f_globals, new_frame.f_locals)
if not val:
return False
except:
if type(condition) != type(''):
if hasattr(condition, 'encode'):
condition = condition.encode('utf-8')
msg = 'Error while evaluating expression: %s\n' % (condition,)
sys.stderr.write(msg)
traceback.print_exc()
if not global_debugger.suspend_on_breakpoint_exception:
return False
else:
try:
# add exception_type and stacktrace into thread additional info
etype, value, tb = sys.exc_info()
try:
error = ''.join(traceback.format_exception_only(etype, value))
stack = traceback.extract_stack(f=tb.tb_frame.f_back)
# On self.set_suspend(thread, CMD_SET_BREAK) this info will be
# sent to the client.
info.conditional_breakpoint_exception = \
('Condition:\n' + condition + '\n\nError:\n' + error, stack)
finally:
etype, value, tb = None, None, None
except:
traceback.print_exc()
if breakpoint.expression is not None:
try:
try:
val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals)
except:
val = sys.exc_info()[1]
finally:
if val is not None:
info.pydev_message = str(val)
if breakpoint.suspend_policy == "ALL":
global_debugger.suspend_all_other_threads(thread)
return True
def _get_line_for_frame(frame):
# it's absolutely necessary to reset tracing function for frame in order to get the real line number
tracing_func = frame.f_trace
frame.f_trace = None
line = frame.f_lineno
frame.f_trace = tracing_func
return line
def _pydev_stop_at_break():
frame = sys._getframe(1)
t = threading.currentThread()
if t.additional_info.is_tracing:
return
if t.additional_info.pydev_step_cmd == -1 and sys.gettrace() is None:
# do not handle breakpoints while stepping, because they're handled by old tracing function
t.additional_info.is_tracing = True
debugger = get_global_debugger()
try:
abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename]
except:
abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame)
filename = abs_path_real_path_and_base[1]
breakpoints_for_file = debugger.breakpoints.get(filename)
line = _get_line_for_frame(frame)
try:
breakpoint = breakpoints_for_file[line]
except KeyError:
pydev_log.debug("Couldn't find breakpoint in the file {} on line {}".format(frame.f_code.co_filename, line))
return
if breakpoint and handle_breakpoint(frame, t, debugger, breakpoint):
pydev_log.debug("Suspending at breakpoint in file: {} on line {}".format(frame.f_code.co_filename, line))
debugger.set_suspend(t, CMD_SET_BREAK)
debugger.do_wait_suspend(t, frame, 'line', None)
t.additional_info.is_tracing = False
def pydev_trace_code_wrapper():
# import this module again, because it's inserted inside user's code
global _pydev_stop_at_break
_pydev_stop_at_break()
| apache-2.0 | -6,613,420,800,787,674,000 | 37.247788 | 120 | 0.600879 | false |
nharraud/invenio-oaiharvester | invenio_oaiharvester/upgrades/oaiharvester_2015_07_14_innodb.py | 1 | 1535 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Fixes foreign key relationship."""
from invenio.ext.sqlalchemy import db
from invenio_upgrader.api import op
depends_on = ['invenio_2015_03_03_tag_value']
def info():
"""Return upgrade recipe information."""
return "Fixes foreign key relationship."
def do_upgrade():
"""Carry out the upgrade."""
op.alter_column(
table_name='oaiHARVESTLOG',
column_name='bibupload_task_id',
type_=db.MediumInteger(15, unsigned=True),
existing_nullable=False,
existing_server_default='0'
)
def estimate():
"""Estimate running time of upgrade in seconds (optional)."""
return 1
def pre_upgrade():
"""Pre-upgrade checks."""
pass
def post_upgrade():
"""Post-upgrade checks."""
pass
| gpl-2.0 | 3,099,105,697,547,080,000 | 25.465517 | 74 | 0.693811 | false |
spigwitmer/mysqlproxy | mysqlproxy/binary_protocol.py | 1 | 4250 | """
Binary protocol value handling
"""
from mysqlproxy.types import *
from mysqlproxy import column_types as coltypes
import struct
from datetime import datetime
def generate_binary_field_info(val, type_code):
"""
Returns a list of data types representing the value
`val` of type code `type_code` (see mysqlproxy/column_types.py)
"""
if type_code in [coltypes.STRING, coltypes.VARCHAR, coltypes.VAR_STRING,
coltypes.ENUM, coltypes.SET, coltypes.LONG_BLOB,
coltypes.MEDIUM_BLOB, coltypes.BLOB, coltypes.TINY_BLOB,
coltypes.GEOMETRY, coltypes.BIT, coltypes.DECIMAL,
coltypes.NEWDECIMAL]:
return [('str_val', LengthEncodedString(unicode(val)))]
elif type_code == coltypes.LONGLONG:
return [('uint64_val', FixedLengthInteger(8, val))]
elif type_code in [coltypes.LONG, coltypes.INT24]:
return [('uint32_val', FixedLengthInteger(4, val))]
elif type_code in [coltypes.SHORT, coltypes.YEAR]:
return [('uint16_val', FixedLengthInteger(2, val))]
elif type_code == coltypes.TINY:
return [('uint8_val', FixedLengthInteger(1, val))]
elif type_code == coltypes.DOUBLE:
return [('double_val', FixedLengthString(8, struct.pack('<d', float(val))))]
elif type_code == coltypes.FLOAT:
return [('float_val', FixedLengthString(4, struct.pack('<f', float(val))))]
elif type_code in [coltypes.DATE, coltypes.DATETIME, coltypes.TIMESTAMP]:
if type(val) in [tuple, list]:
# we sorta know what we're doing
try:
year, month, day, hour, minute, second, micro_second = val
except:
# not enough values in tuple/list, so uh, panic
raise ValueError('val for binary Datetime/Timestamp cannot be parsed')
elif type(val) == int:
# assume a UNIX timestamp
dt = datetime.fromtimestamp(val)
year, month, day, hour, minute, second, micro_second = \
dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.microsecond
else:
raise ValueError('val for binary Datetime/Timestamp cannot be parsed')
# TODO: 0-val optimizations, length doesn't have to be 11
return [
('packet_length', FixedLengthInteger(1, 11)),
('year', FixedLengthInteger(2, year)),
('month', FixedLengthInteger(1, month)),
('day', FixedLengthInteger(1, day)),
('hour', FixedLengthInteger(1, hour)),
('minute', FixedLengthInteger(1, minute)),
('second', FixedLengthInteger(1, second)),
('micro_second', FixedLengthInteger(4, micro_second))
]
elif type_code == coltypes.TIME:
# time delta
if type(val) not in [tuple, list]:
            raise ValueError('Cannot parse val for TIME type from type %s' % type(val))
        # everything's an integer, right?
        if reduce(lambda x, y: x+y, [int(type(x) != int) for x in val]) != 0:
            raise ValueError('Cannot parse val for TIME type: non-integer value')
        if len(val) == 5:
            # is_negative implied by the sign of the first non-zero value
            is_negative = 0
            for v in val:
                if v != 0:
                    is_negative = (1 if v < 0 else 0)
                    break
days, hours, minutes, seconds, micro_seconds = [abs(x) for x in list(val)]
elif len(val) == 6:
is_negative, days, hours, minutes, seconds, micro_seconds = val
is_negative = int(is_negative) # if a bool, convert it
else:
raise ValueError('val for TIME type is incomplete length (%d)' % len(val))
# TODO: again, 0-val optimizations
return [
('field_length', FixedLengthInteger(1, 12)),
('is_negative', FixedLengthInteger(1, is_negative)),
('days', FixedLengthInteger(4, days)),
('hours', FixedLengthInteger(1, hours)),
('minutes', FixedLengthInteger(1, minutes)),
('seconds', FixedLengthInteger(1, seconds)),
('micro_seconds', FixedLengthInteger(4, micro_seconds)),
]
else:
raise ValueError('Invalid column type (code: %d)' % type_code)
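# Illustrative example (not part of the original module): a LONG column value
# is encoded as a single fixed-length 4-byte integer field, while a DATETIME
# passed as a 7-tuple expands into the length-prefixed date/time fields built
# above.
#
#   generate_binary_field_info(1234, coltypes.LONG)
#   # -> [('uint32_val', FixedLengthInteger(4, 1234))]
#   generate_binary_field_info((2015, 6, 1, 12, 0, 0, 0), coltypes.DATETIME)
#   # -> [('packet_length', ...), ('year', ...), ..., ('micro_second', ...)]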
| bsd-3-clause | -2,209,202,731,825,847,600 | 44.698925 | 88 | 0.597647 | false |
stdlib-js/stdlib | lib/node_modules/@stdlib/math/base/special/spence/benchmark/python/scipy/benchmark.py | 1 | 2199 | #!/usr/bin/env python
#
# @license Apache-2.0
#
# Copyright (c) 2018 The Stdlib Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Benchmark scipy.special.spence."""
from __future__ import print_function
import timeit
NAME = "spence"
REPEATS = 3
ITERATIONS = 1000000
def print_version():
"""Print the TAP version."""
print("TAP version 13")
def print_summary(total, passing):
"""Print the benchmark summary.
# Arguments
* `total`: total number of tests
* `passing`: number of passing tests
"""
print("#")
print("1.." + str(total)) # TAP plan
print("# total " + str(total))
print("# pass " + str(passing))
print("#")
print("# ok")
def print_results(elapsed):
"""Print benchmark results.
# Arguments
* `elapsed`: elapsed time (in seconds)
# Examples
``` python
python> print_results(0.131009101868)
```
"""
rate = ITERATIONS / elapsed
print(" ---")
print(" iterations: " + str(ITERATIONS))
print(" elapsed: " + str(elapsed))
print(" rate: " + str(rate))
print(" ...")
def benchmark():
"""Run the benchmark and print benchmark results."""
setup = "from scipy.special import spence; from random import random;"
stmt = "y = spence(1000.0*random())"
t = timeit.Timer(stmt, setup=setup)
print_version()
for i in range(REPEATS):
print("# python::scipy::" + NAME)
elapsed = t.timeit(number=ITERATIONS)
print_results(elapsed)
print("ok " + str(i+1) + " benchmark finished")
print_summary(REPEATS, REPEATS)
def main():
"""Run the benchmark."""
benchmark()
if __name__ == "__main__":
main()
| apache-2.0 | -1,667,017,985,710,595,600 | 21.670103 | 74 | 0.631196 | false |
pshen/ansible | lib/ansible/modules/files/assemble.py | 35 | 9132 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Stephen Fromm <sfromm@gmail.com>
# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: assemble
short_description: Assembles a configuration file from fragments
description:
- Assembles a configuration file from fragments. Often a particular
program will take a single configuration file and does not support a
C(conf.d) style structure where it is easy to build up the configuration
from multiple sources. C(assemble) will take a directory of files that can be
local or have already been transferred to the system, and concatenate them
together to produce a destination file. Files are assembled in string sorting order.
Puppet calls this idea I(fragments).
version_added: "0.5"
options:
src:
description:
- An already existing directory full of source files.
required: true
default: null
aliases: []
dest:
description:
- A file to create using the concatenation of all of the source files.
required: true
default: null
backup:
description:
- Create a backup file (if C(yes)), including the timestamp information so
you can get the original file back if you somehow clobbered it
incorrectly.
required: false
choices: [ "yes", "no" ]
default: "no"
delimiter:
description:
- A delimiter to separate the file contents.
version_added: "1.4"
required: false
default: null
remote_src:
description:
- If False, it will search for src at originating/master machine, if True it will
go to the remote/target machine for the src. Default is True.
choices: [ "True", "False" ]
required: false
default: "True"
version_added: "1.4"
regexp:
description:
      - Assemble files only if C(regexp) matches the filename. If not set,
        all files are assembled. All "\\" (backslash) must be escaped as
        "\\\\" to comply with YAML syntax. Uses Python regular expressions; see
U(http://docs.python.org/2/library/re.html).
required: false
default: null
ignore_hidden:
description:
- A boolean that controls if files that start with a '.' will be included or not.
required: false
default: false
version_added: "2.0"
validate:
description:
- The validation command to run before copying into place. The path to the file to
validate is passed in via '%s' which must be present as in the sshd example below.
The command is passed securely so shell features like expansion and pipes won't work.
required: false
default: null
version_added: "2.0"
author: "Stephen Fromm (@sfromm)"
extends_documentation_fragment:
- files
- decrypt
'''
EXAMPLES = '''
# Example from Ansible Playbooks
- assemble:
src: /etc/someapp/fragments
dest: /etc/someapp/someapp.conf
# When a delimiter is specified, it will be inserted in between each fragment
- assemble:
src: /etc/someapp/fragments
dest: /etc/someapp/someapp.conf
delimiter: '### START FRAGMENT ###'
# Copy a new "sshd_config" file into place, after passing validation with sshd
- assemble:
src: /etc/ssh/conf.d/
dest: /etc/ssh/sshd_config
validate: '/usr/sbin/sshd -t -f %s'
'''
import codecs
import os
import os.path
import re
import tempfile
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.six import b
# ===========================================
# Support method
def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, ignore_hidden=False):
''' assemble a file from a directory of fragments '''
tmpfd, temp_path = tempfile.mkstemp()
tmp = os.fdopen(tmpfd, 'wb')
delimit_me = False
add_newline = False
for f in sorted(os.listdir(src_path)):
if compiled_regexp and not compiled_regexp.search(f):
continue
fragment = os.path.join(src_path, f)
if not os.path.isfile(fragment) or (ignore_hidden and os.path.basename(fragment).startswith('.')):
continue
fragment_content = open(fragment, 'rb').read()
# always put a newline between fragments if the previous fragment didn't end with a newline.
if add_newline:
tmp.write(b('\n'))
# delimiters should only appear between fragments
if delimit_me:
if delimiter:
# un-escape anything like newlines
delimiter = codecs.escape_decode(delimiter)[0]
tmp.write(delimiter)
# always make sure there's a newline after the
# delimiter, so lines don't run together
if delimiter[-1] != b('\n'):
tmp.write(b('\n'))
tmp.write(fragment_content)
delimit_me = True
if fragment_content.endswith(b('\n')):
add_newline = False
else:
add_newline = True
tmp.close()
return temp_path
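# Illustrative sketch (not part of the original module): main() below ends up
# calling assemble_from_fragments() roughly like this, where /etc/app/conf.d
# is a hypothetical fragments directory:
#   temp_path = assemble_from_fragments('/etc/app/conf.d',
#                                       delimiter='# --- next fragment ---',
#                                       compiled_regexp=re.compile(r'\.conf$'),
#                                       ignore_hidden=True)
#   # temp_path points at a concatenated temp file ready for atomic_move()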
def cleanup(path, result=None):
# cleanup just in case
if os.path.exists(path):
try:
os.remove(path)
except (IOError, OSError):
e = get_exception()
# don't error on possible race conditions, but keep warning
if result is not None:
result['warnings'] = ['Unable to remove temp file (%s): %s' % (path, str(e))]
def main():
module = AnsibleModule(
# not checking because of daisy chain to file module
argument_spec = dict(
src = dict(required=True, type='path'),
delimiter = dict(required=False),
dest = dict(required=True, type='path'),
backup=dict(default=False, type='bool'),
remote_src=dict(default=False, type='bool'),
regexp = dict(required=False),
ignore_hidden = dict(default=False, type='bool'),
validate = dict(required=False, type='str'),
),
add_file_common_args=True
)
changed = False
path_hash = None
dest_hash = None
src = module.params['src']
dest = module.params['dest']
backup = module.params['backup']
delimiter = module.params['delimiter']
regexp = module.params['regexp']
compiled_regexp = None
ignore_hidden = module.params['ignore_hidden']
validate = module.params.get('validate', None)
result = dict(src=src, dest=dest)
if not os.path.exists(src):
module.fail_json(msg="Source (%s) does not exist" % src)
if not os.path.isdir(src):
module.fail_json(msg="Source (%s) is not a directory" % src)
if regexp is not None:
try:
compiled_regexp = re.compile(regexp)
except re.error:
e = get_exception()
module.fail_json(msg="Invalid Regexp (%s) in \"%s\"" % (e, regexp))
if validate and "%s" not in validate:
module.fail_json(msg="validate must contain %%s: %s" % validate)
path = assemble_from_fragments(src, delimiter, compiled_regexp, ignore_hidden)
path_hash = module.sha1(path)
result['checksum'] = path_hash
# Backwards compat. This won't return data if FIPS mode is active
try:
pathmd5 = module.md5(path)
except ValueError:
pathmd5 = None
result['md5sum'] = pathmd5
if os.path.exists(dest):
dest_hash = module.sha1(dest)
if path_hash != dest_hash:
if validate:
(rc, out, err) = module.run_command(validate % path)
result['validation'] = dict(rc=rc, stdout=out, stderr=err)
if rc != 0:
cleanup(path)
module.fail_json(msg="failed to validate: rc:%s error:%s" % (rc, err))
if backup and dest_hash is not None:
result['backup_file'] = module.backup_local(dest)
module.atomic_move(path, dest, unsafe_writes=module.params['unsafe_writes'])
changed = True
cleanup(path, result)
# handle file permissions
file_args = module.load_file_common_arguments(module.params)
result['changed'] = module.set_fs_attributes_if_different(file_args, changed)
# Mission complete
result['msg'] = "OK"
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -8,550,170,045,236,889,000 | 32.573529 | 106 | 0.632501 | false |
linvictor88/vse-lbaas-driver | quantum/plugins/cisco/nexus/cisco_nexus_network_driver_v2.py | 1 | 9336 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2011 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Debojyoti Dutta, Cisco Systems, Inc.
# @author: Edgar Magana, Cisco Systems Inc.
#
"""
Implements a Nexus-OS NETCONF over SSHv2 API Client
"""
import logging
from ncclient import manager
from quantum.plugins.cisco.common import cisco_exceptions as cexc
from quantum.plugins.cisco.db import network_db_v2 as cdb
from quantum.plugins.cisco.db import nexus_db_v2
from quantum.plugins.cisco.nexus import cisco_nexus_snippets as snipp
LOG = logging.getLogger(__name__)
class CiscoNEXUSDriver():
"""Nexus Driver Main Class."""
def __init__(self):
pass
def _edit_config(self, mgr, target='running', config='',
allowed_exc_strs=None):
"""Modify switch config for a target config type.
:param mgr: NetConf client manager
:param target: Target config type
:param config: Configuration string in XML format
:param allowed_exc_strs: Exceptions which have any of these strings
as a subset of their exception message
(str(exception)) can be ignored
:raises: NexusConfigFailed
"""
if not allowed_exc_strs:
allowed_exc_strs = []
try:
mgr.edit_config(target, config=config)
except Exception as e:
for exc_str in allowed_exc_strs:
if exc_str in str(e):
break
else:
# Raise a Quantum exception. Include a description of
# the original ncclient exception.
raise cexc.NexusConfigFailed(config=config, exc=e)
def nxos_connect(self, nexus_host, nexus_ssh_port, nexus_user,
nexus_password):
"""Make SSH connection to the Nexus Switch."""
try:
man = manager.connect(host=nexus_host, port=nexus_ssh_port,
username=nexus_user,
password=nexus_password)
except Exception as e:
# Raise a Quantum exception. Include a description of
# the original ncclient exception.
raise cexc.NexusConnectFailed(nexus_host=nexus_host, exc=e)
return man
def create_xml_snippet(self, cutomized_config):
"""Create XML snippet.
        Creates the proper XML structure for the Nexus Switch configuration.
"""
conf_xml_snippet = snipp.EXEC_CONF_SNIPPET % (cutomized_config)
return conf_xml_snippet
def enable_vlan(self, mgr, vlanid, vlanname):
"""Create a VLAN on Nexus Switch given the VLAN ID and Name."""
confstr = self.create_xml_snippet(
snipp.CMD_VLAN_CONF_SNIPPET % (vlanid, vlanname))
self._edit_config(mgr, target='running', config=confstr)
# Enable VLAN active and no-shutdown states. Some versions of
# Nexus switch do not allow state changes for the extended VLAN
# range (1006-4094), but these errors can be ignored (default
# values are appropriate).
state_config = [snipp.CMD_VLAN_ACTIVE_SNIPPET,
snipp.CMD_VLAN_NO_SHUTDOWN_SNIPPET]
for snippet in state_config:
try:
confstr = self.create_xml_snippet(snippet % vlanid)
self._edit_config(
mgr,
target='running',
config=confstr,
allowed_exc_strs=["Can't modify state for extended",
"Command is only allowed on VLAN"])
except cexc.NexusConfigFailed as e:
# Rollback VLAN creation
try:
self.disable_vlan(mgr, vlanid)
finally:
# Re-raise original exception
raise e
def disable_vlan(self, mgr, vlanid):
"""Delete a VLAN on Nexus Switch given the VLAN ID."""
confstr = snipp.CMD_NO_VLAN_CONF_SNIPPET % vlanid
confstr = self.create_xml_snippet(confstr)
self._edit_config(mgr, target='running', config=confstr)
def enable_port_trunk(self, mgr, interface):
"""Enable trunk mode an interface on Nexus Switch."""
confstr = snipp.CMD_PORT_TRUNK % (interface)
confstr = self.create_xml_snippet(confstr)
LOG.debug(_("NexusDriver: %s"), confstr)
self._edit_config(mgr, target='running', config=confstr)
def disable_switch_port(self, mgr, interface):
"""Disable trunk mode an interface on Nexus Switch."""
confstr = snipp.CMD_NO_SWITCHPORT % (interface)
confstr = self.create_xml_snippet(confstr)
LOG.debug(_("NexusDriver: %s"), confstr)
self._edit_config(mgr, target='running', config=confstr)
def enable_vlan_on_trunk_int(self, mgr, nexus_switch, interface, vlanid):
"""Enable vlan in trunk interface.
        Enables trunk-mode VLAN access on an interface on the Nexus Switch,
        given the VLAN ID.
"""
# If one or more VLANs are already configured on this interface,
# include the 'add' keyword.
if nexus_db_v2.get_port_switch_bindings(interface, nexus_switch):
snippet = snipp.CMD_INT_VLAN_ADD_SNIPPET
else:
snippet = snipp.CMD_INT_VLAN_SNIPPET
confstr = snippet % (interface, vlanid)
confstr = self.create_xml_snippet(confstr)
LOG.debug(_("NexusDriver: %s"), confstr)
self._edit_config(mgr, target='running', config=confstr)
def disable_vlan_on_trunk_int(self, mgr, interface, vlanid):
"""Disable VLAN.
        Disables trunk-mode VLAN access on an interface on the Nexus Switch,
        given the VLAN ID.
"""
confstr = snipp.CMD_NO_VLAN_INT_SNIPPET % (interface, vlanid)
confstr = self.create_xml_snippet(confstr)
LOG.debug(_("NexusDriver: %s"), confstr)
self._edit_config(mgr, target='running', config=confstr)
def create_vlan(self, vlan_name, vlan_id, nexus_host, nexus_user,
nexus_password, nexus_ports,
                    nexus_ssh_port, vlan_ids=None):
        """Create a VLAN and enable it on the interface.
        Creates a VLAN and enables it in trunk mode on an interface on the
        Nexus Switch, given the VLAN ID, name, and interface number.
"""
man = self.nxos_connect(nexus_host, int(nexus_ssh_port),
nexus_user, nexus_password)
self.enable_vlan(man, vlan_id, vlan_name)
        if not vlan_ids:
vlan_ids = self.build_vlans_cmd()
LOG.debug(_("NexusDriver VLAN IDs: %s"), vlan_ids)
for ports in nexus_ports:
self.enable_vlan_on_trunk_int(man, nexus_host, ports, vlan_ids)
def delete_vlan(self, vlan_id, nexus_host, nexus_user, nexus_password,
nexus_ports, nexus_ssh_port):
"""Delete vlan.
        Deletes a VLAN and disables trunk mode on an interface on the Nexus
        Switch, given the VLAN ID and interface number.
"""
man = self.nxos_connect(nexus_host, int(nexus_ssh_port),
nexus_user, nexus_password)
self.disable_vlan(man, vlan_id)
for ports in nexus_ports:
self.disable_vlan_on_trunk_int(man, ports, vlan_id)
def build_vlans_cmd(self):
"""Builds a string with all the VLANs on the same Switch."""
assigned_vlan = cdb.get_all_vlanids_used()
vlans = ''
for vlanid in assigned_vlan:
vlans = str(vlanid["vlan_id"]) + ',' + vlans
if vlans == '':
vlans = 'none'
return vlans.strip(',')
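    # Illustrative sketch (assumed values, not from the original source): if the
    # database currently holds VLAN IDs 100, 200 and 300, build_vlans_cmd()
    # returns a comma-separated string such as "300,200,100"; with no VLANs
    # assigned it returns "none".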
def add_vlan_int(self, vlan_id, nexus_host, nexus_user, nexus_password,
nexus_ports, nexus_ssh_port, vlan_ids=None):
"""Add vlan.
        Adds a VLAN to interfaces on the Nexus switch, given the VLAN ID.
"""
man = self.nxos_connect(nexus_host, int(nexus_ssh_port),
nexus_user, nexus_password)
if not vlan_ids:
vlan_ids = self.build_vlans_cmd()
for ports in nexus_ports:
self.enable_vlan_on_trunk_int(man, nexus_host, ports, vlan_ids)
def remove_vlan_int(self, vlan_id, nexus_host, nexus_user, nexus_password,
nexus_ports, nexus_ssh_port):
"""Remove vlan.
Removes a vlan from interfaces on the Nexus switch given the VLAN ID.
"""
man = self.nxos_connect(nexus_host, int(nexus_ssh_port),
nexus_user, nexus_password)
for ports in nexus_ports:
self.disable_vlan_on_trunk_int(man, ports, vlan_id)
| apache-2.0 | -4,213,485,335,659,307,500 | 39.768559 | 78 | 0.59865 | false |
sharad/calibre | src/calibre/web/feeds/__init__.py | 5 | 12377 | #!/usr/bin/env python
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
'''
Contains the logic for parsing feeds.
'''
import time, traceback, copy, re
from calibre.utils.logging import default_log
from calibre import entity_to_unicode, strftime
from calibre.utils.date import dt_factory, utcnow, local_tz
from calibre.utils.cleantext import clean_ascii_chars
class Article(object):
def __init__(self, id, title, url, author, summary, published, content):
from lxml import html
self.downloaded = False
self.id = id
self._title = title.strip() if title else title
try:
self._title = re.sub(r'&(\S+?);',
entity_to_unicode, self._title)
except:
pass
if not isinstance(self._title, unicode):
self._title = self._title.decode('utf-8', 'replace')
self._title = clean_ascii_chars(self._title)
self.url = url
self.author = author
self.toc_thumbnail = None
if author and not isinstance(author, unicode):
author = author.decode('utf-8', 'replace')
self.summary = summary
if summary and not isinstance(summary, unicode):
summary = summary.decode('utf-8', 'replace')
if summary and '<' in summary:
try:
s = html.fragment_fromstring(summary, create_parent=True)
summary = html.tostring(s, method='text', encoding=unicode)
except:
print 'Failed to process article summary, deleting:'
print summary.encode('utf-8')
traceback.print_exc()
summary = u''
self.text_summary = clean_ascii_chars(summary)
self.author = author
self.content = content
self.date = published
self.utctime = dt_factory(self.date, assume_utc=True, as_utc=True)
self.localtime = self.utctime.astimezone(local_tz)
self._formatted_date = None
@dynamic_property
def formatted_date(self):
def fget(self):
if self._formatted_date is None:
self._formatted_date = strftime(" [%a, %d %b %H:%M]",
t=self.localtime.timetuple())
return self._formatted_date
def fset(self, val):
if isinstance(val, unicode):
self._formatted_date = val
return property(fget=fget, fset=fset)
@dynamic_property
def title(self):
def fget(self):
t = self._title
if not isinstance(t, unicode) and hasattr(t, 'decode'):
t = t.decode('utf-8', 'replace')
return t
def fset(self, val):
self._title = clean_ascii_chars(val)
return property(fget=fget, fset=fset)
def __repr__(self):
return \
(u'''\
Title : %s
URL : %s
Author : %s
Summary : %s
Date : %s
TOC thumb : %s
Has content : %s
'''%(self.title, self.url, self.author, self.summary[:20]+'...',
         self.localtime.strftime('%a, %d %b, %Y %H:%M'), self.toc_thumbnail,
bool(self.content))).encode('utf-8')
def __str__(self):
return repr(self)
def is_same_as(self, other_article):
# if self.title != getattr(other_article, 'title', False):
# return False
if self.url:
return self.url == getattr(other_article, 'url', False)
return self.content == getattr(other_article, 'content', False)
class Feed(object):
def __init__(self, get_article_url=lambda item: item.get('link', None),
log=default_log):
'''
Parse a feed into articles.
'''
self.logger = log
self.get_article_url = get_article_url
def populate_from_feed(self, feed, title=None, oldest_article=7,
max_articles_per_feed=100):
entries = feed.entries
feed = feed.feed
self.title = feed.get('title', _('Unknown section')) if not title else title
self.description = feed.get('description', '')
image = feed.get('image', {})
self.image_url = image.get('href', None)
self.image_width = image.get('width', 88)
self.image_height = image.get('height', 31)
self.image_alt = image.get('title', '')
self.articles = []
self.id_counter = 0
self.added_articles = []
self.oldest_article = oldest_article
for item in entries:
if len(self.articles) >= max_articles_per_feed:
break
self.parse_article(item)
def populate_from_preparsed_feed(self, title, articles, oldest_article=7,
max_articles_per_feed=100):
self.title = unicode(title if title else _('Unknown feed'))
self.description = ''
self.image_url = None
self.articles = []
self.added_articles = []
self.oldest_article = oldest_article
self.id_counter = 0
for item in articles:
if len(self.articles) >= max_articles_per_feed:
break
self.id_counter += 1
id = item.get('id', None)
if not id:
id = 'internal id#%s'%self.id_counter
if id in self.added_articles:
return
self.added_articles.append(id)
published = time.gmtime(item.get('timestamp', time.time()))
title = item.get('title', _('Untitled article'))
link = item.get('url', None)
description = item.get('description', '')
content = item.get('content', '')
author = item.get('author', '')
article = Article(id, title, link, author, description, published, content)
delta = utcnow() - article.utctime
if delta.days*24*3600 + delta.seconds <= 24*3600*self.oldest_article:
self.articles.append(article)
else:
t = strftime(u'%a, %d %b, %Y %H:%M', article.localtime.timetuple())
self.logger.debug(u'Skipping article %s (%s) from feed %s as it is too old.'%
(title, t, self.title))
d = item.get('date', '')
article.formatted_date = d
def parse_article(self, item):
self.id_counter += 1
id = item.get('id', None)
if not id:
id = 'internal id#%s'%self.id_counter
if id in self.added_articles:
return
published = None
for date_field in ('date_parsed', 'published_parsed',
'updated_parsed'):
published = item.get(date_field, None)
if published is not None:
break
if not published:
published = time.gmtime()
self.added_articles.append(id)
title = item.get('title', _('Untitled article'))
try:
link = self.get_article_url(item)
except:
self.logger.warning('Failed to get link for %s'%title)
self.logger.debug(traceback.format_exc())
link = None
description = item.get('summary', None)
author = item.get('author', None)
content = [i.value for i in item.get('content', []) if i.value]
content = [i if isinstance(i, unicode) else i.decode('utf-8', 'replace')
for i in content]
content = u'\n'.join(content)
if not content.strip():
content = None
if not link and not content:
return
article = Article(id, title, link, author, description, published, content)
delta = utcnow() - article.utctime
if delta.days*24*3600 + delta.seconds <= 24*3600*self.oldest_article:
self.articles.append(article)
else:
try:
self.logger.debug('Skipping article %s (%s) from feed %s as it is too old.'%
(title, article.localtime.strftime('%a, %d %b, %Y %H:%M'), self.title))
except UnicodeDecodeError:
if not isinstance(title, unicode):
title = title.decode('utf-8', 'replace')
self.logger.debug('Skipping article %s as it is too old'%title)
def reverse(self):
self.articles.reverse()
def __iter__(self):
return iter(self.articles)
def __len__(self):
return len(self.articles)
def __repr__(self):
res = [('%20s\n'%'').replace(' ', '_')+repr(art) for art in self]
return '\n'+'\n'.join(res)+'\n'
def __str__(self):
return repr(self)
def __bool__(self):
for article in self:
if getattr(article, 'downloaded', False):
return True
return False
def has_embedded_content(self):
length = 0
for a in self:
if a.content or a.summary:
length += max(len(a.content if a.content else ''),
len(a.summary if a.summary else ''))
return length > 2000 * len(self)
def has_article(self, article):
for a in self:
if a.is_same_as(article):
return True
return False
def find(self, article):
for i, a in enumerate(self):
if a.is_same_as(article):
return i
return -1
def remove(self, article):
i = self.index(article)
if i > -1:
self.articles[i:i+1] = []
def remove_article(self, article):
try:
self.articles.remove(article)
except ValueError:
pass
class FeedCollection(list):
def __init__(self, feeds):
list.__init__(self, [f for f in feeds if len(f.articles) > 0])
found_articles = set([])
duplicates = set([])
def in_set(s, a):
for x in s:
if a.is_same_as(x):
return x
return None
print '#feeds', len(self)
print map(len, self)
for f in self:
dups = []
for a in f:
first = in_set(found_articles, a)
if first is not None:
dups.append(a)
duplicates.add((first, f))
else:
found_articles.add(a)
for x in dups:
f.articles.remove(x)
self.duplicates = duplicates
print len(duplicates)
print map(len, self)
# raise
def find_article(self, article):
for j, f in enumerate(self):
for i, a in enumerate(f):
if a is article:
return (j, i)
def restore_duplicates(self):
temp = []
for article, feed in self.duplicates:
art = copy.deepcopy(article)
j, i = self.find_article(article)
art.url = '../feed_%d/article_%d/index.html'%(j, i)
temp.append((feed, art))
for feed, art in temp:
feed.articles.append(art)
def feed_from_xml(raw_xml, title=None, oldest_article=7,
max_articles_per_feed=100,
get_article_url=lambda item: item.get('link', None),
log=default_log):
from calibre.web.feeds.feedparser import parse
# Handle unclosed escaped entities. They trip up feedparser and HBR for one
# generates them
raw_xml = re.sub(r'(&#\d+)([^0-9;])', r'\1;\2', raw_xml)
feed = parse(raw_xml)
pfeed = Feed(get_article_url=get_article_url, log=log)
pfeed.populate_from_feed(feed, title=title,
oldest_article=oldest_article,
max_articles_per_feed=max_articles_per_feed)
return pfeed
def feeds_from_index(index, oldest_article=7, max_articles_per_feed=100,
log=default_log):
'''
@param index: A parsed index as returned by L{BasicNewsRecipe.parse_index}.
@return: A list of L{Feed} objects.
@rtype: list
'''
feeds = []
for title, articles in index:
pfeed = Feed(log=log)
pfeed.populate_from_preparsed_feed(title, articles, oldest_article=oldest_article,
max_articles_per_feed=max_articles_per_feed)
feeds.append(pfeed)
return feeds
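# Illustrative sketch (not part of the original source): feeds_from_index()
# expects an index shaped like the value parse_index() builds, e.g.
#   index = [('World news', [{'title': 'Some headline',
#                             'url': 'http://example.com/article',
#                             'date': '', 'description': '', 'content': ''}])]
#   feeds = feeds_from_index(index, oldest_article=7)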
| gpl-3.0 | -2,584,616,136,062,738,000 | 33.572626 | 105 | 0.535833 | false |
simonwydooghe/ansible | lib/ansible/modules/remote_management/ucs/ucs_wwn_pool.py | 5 | 8556 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: ucs_wwn_pool
short_description: Configures WWNN or WWPN pools on Cisco UCS Manager
description:
- Configures WWNNs or WWPN pools on Cisco UCS Manager.
extends_documentation_fragment: ucs
options:
state:
description:
- If C(present), will verify WWNNs/WWPNs are present and will create if needed.
- If C(absent), will verify WWNNs/WWPNs are absent and will delete if needed.
choices: [present, absent]
default: present
name:
description:
- The name of the World Wide Node Name (WWNN) or World Wide Port Name (WWPN) pool.
- This name can be between 1 and 32 alphanumeric characters.
- "You cannot use spaces or any special characters other than - (hyphen), \"_\" (underscore), : (colon), and . (period)."
- You cannot change this name after the WWNN or WWPN pool is created.
required: yes
purpose:
description:
- Specify whether this is a node (WWNN) or port (WWPN) pool.
- Optional if state is absent.
choices: [node, port]
required: yes
description:
description:
- A description of the WWNN or WWPN pool.
- Enter up to 256 characters.
- "You can use any characters or spaces except the following:"
- "` (accent mark), \ (backslash), ^ (carat), \" (double quote), = (equal sign), > (greater than), < (less than), or ' (single quote)."
aliases: [ descr ]
order:
description:
- The Assignment Order field.
- "This can be one of the following:"
- "default - Cisco UCS Manager selects a random identity from the pool."
- "sequential - Cisco UCS Manager selects the lowest available identity from the pool."
choices: [default, sequential]
default: default
first_addr:
description:
- The first initiator in the World Wide Name (WWN) block.
- This is the From field in the UCS Manager Add WWN Blocks menu.
last_addr:
description:
- The last initiator in the World Wide Name (WWN) block.
- This is the To field in the UCS Manager Add WWN Blocks menu.
- For WWxN pools, the pool size must be a multiple of ports-per-node + 1.
- For example, if there are 7 ports per node, the pool size must be a multiple of 8.
- If there are 63 ports per node, the pool size must be a multiple of 64.
org_dn:
description:
- Org dn (distinguished name)
default: org-root
requirements:
- ucsmsdk
author:
- David Soper (@dsoper2)
- CiscoUcs (@CiscoUcs)
version_added: '2.5'
'''
EXAMPLES = r'''
- name: Configure WWNN/WWPN pools
ucs_wwn_pool:
hostname: 172.16.143.150
username: admin
password: password
name: WWNN-Pool
purpose: node
first_addr: 20:00:00:25:B5:48:00:00
last_addr: 20:00:00:25:B5:48:00:0F
- ucs_wwn_pool:
hostname: 172.16.143.150
username: admin
password: password
name: WWPN-Pool-A
purpose: port
order: sequential
first_addr: 20:00:00:25:B5:48:0A:00
last_addr: 20:00:00:25:B5:48:0A:0F
- name: Remove WWNN/WWPN pools
ucs_wwn_pool:
hostname: 172.16.143.150
username: admin
password: password
name: WWNN-Pool
state: absent
- ucs_wwn_pool:
hostname: 172.16.143.150
username: admin
password: password
name: WWPN-Pool-A
state: absent
'''
RETURN = r'''
#
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.remote_management.ucs import UCSModule, ucs_argument_spec
def main():
argument_spec = ucs_argument_spec
argument_spec.update(
org_dn=dict(type='str', default='org-root'),
name=dict(type='str'),
purpose=dict(type='str', choices=['node', 'port']),
descr=dict(type='str'),
order=dict(type='str', default='default', choices=['default', 'sequential']),
first_addr=dict(type='str'),
last_addr=dict(type='str'),
state=dict(type='str', default='present', choices=['present', 'absent']),
wwn_list=dict(type='list'),
)
# Note that use of wwn_list is an experimental feature which allows multiple resource updates with a single UCSM connection.
# Support for wwn_list may change or be removed once persistent UCS connections are supported.
# Either wwn_list or name is required (user can specify either a list or single resource).
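    # Illustrative sketch (assumed values, not from the original source): an entry
    # in wwn_list mirrors the single-resource parameters, e.g.
    #   wwn_list:
    #     - name: WWPN-Pool-B
    #       purpose: port
    #       first_addr: 20:00:00:25:B5:48:0B:00
    #       last_addr: 20:00:00:25:B5:48:0B:0F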
module = AnsibleModule(
argument_spec,
supports_check_mode=True,
required_one_of=[
['wwn_list', 'name']
],
mutually_exclusive=[
['wwn_list', 'name']
],
)
ucs = UCSModule(module)
err = False
from ucsmsdk.mometa.fcpool.FcpoolInitiators import FcpoolInitiators
from ucsmsdk.mometa.fcpool.FcpoolBlock import FcpoolBlock
changed = False
try:
# Only documented use is a single resource, but to also support experimental
# feature allowing multiple updates all params are converted to a wwn_list below.
if module.params['wwn_list']:
            # directly use the list (single resource and list are mutually exclusive)
wwn_list = module.params['wwn_list']
else:
# single resource specified, create list from the current params
wwn_list = [module.params]
for wwn in wwn_list:
mo_exists = False
props_match = False
# set default params. Done here to set values for lists which can't be done in the argument_spec
if not wwn.get('descr'):
wwn['descr'] = ''
if not wwn.get('order'):
wwn['order'] = 'default'
# dn is <org_dn>/wwn-pool-<name> for WWNN or WWPN
dn = module.params['org_dn'] + '/wwn-pool-' + wwn['name']
mo = ucs.login_handle.query_dn(dn)
if mo:
mo_exists = True
if module.params['state'] == 'absent':
if mo_exists:
if not module.check_mode:
ucs.login_handle.remove_mo(mo)
ucs.login_handle.commit()
changed = True
else:
# append purpose param with suffix used by UCSM
purpose_param = wwn['purpose'] + '-wwn-assignment'
if mo_exists:
# check top-level mo props
kwargs = dict(assignment_order=wwn['order'])
kwargs['descr'] = wwn['descr']
kwargs['purpose'] = purpose_param
if (mo.check_prop_match(**kwargs)):
# top-level props match, check next level mo/props
if 'last_addr' in wwn and 'first_addr' in wwn:
block_dn = dn + '/block-' + wwn['first_addr'].upper() + '-' + wwn['last_addr'].upper()
mo_1 = ucs.login_handle.query_dn(block_dn)
if mo_1:
props_match = True
else:
props_match = True
if not props_match:
if not module.check_mode:
# create if mo does not already exist
mo = FcpoolInitiators(
parent_mo_or_dn=module.params['org_dn'],
name=wwn['name'],
descr=wwn['descr'],
assignment_order=wwn['order'],
purpose=purpose_param,
)
if 'last_addr' in wwn and 'first_addr' in wwn:
mo_1 = FcpoolBlock(
parent_mo_or_dn=mo,
to=wwn['last_addr'],
r_from=wwn['first_addr'],
)
ucs.login_handle.add_mo(mo, True)
ucs.login_handle.commit()
changed = True
except Exception as e:
err = True
ucs.result['msg'] = "setup error: %s " % str(e)
ucs.result['changed'] = changed
if err:
module.fail_json(**ucs.result)
module.exit_json(**ucs.result)
if __name__ == '__main__':
main()
| gpl-3.0 | -3,517,350,644,316,414,500 | 34.94958 | 139 | 0.571412 | false |
AzePUG/Data_Structures_Algo_Python | Source_Code/python_kodlar/fesil9/fesil9_problems_solutions.py | 2 | 13705 | from queue import Queue
class Node:
def __init__(self, data):
        # node data
        self.data = data
        # left child
        self.left = None
        # right child
self.right = None
def get_data(self):
return self.data
def get_left_child(self):
return self.left
def get_right_child(self):
return self.right
class BinaryTreeExercises:
def __init__(self):
self.root = None
self.max_data = 0.0
def create_tree(self, val):
        # The tree itself is built here.
if self.root is None:
            # Make the first element the root element
self.root = Node(data=val)
else:
            # Make the root the current node
current = self.root
while True:
                # If the given value is smaller than the current node's value,
                # make it the left child
if val < current.data:
if current.left:
current = current.left
else:
current.left = Node(data=val)
                        break
                # If the given value is greater than the current node's value,
                # make it the right child
elif val > current.data:
if current.right:
current = current.right
else:
current.right = Node(data=val)
else:
break
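    # Illustrative sketch (not part of the original source): feeding the demo
    # array [8, 3, 1, 6, 4, 7, 10, 14, 13] into create_tree() produces the
    # binary search tree
    #         8
    #       /   \
    #      3     10
    #     / \      \
    #    1   6     14
    #       / \    /
    #      4   7  13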
def find_max_recursive(self, node):
if not node:
return self.max_data
if node.get_data() > self.max_data:
self.max_data = node.get_data()
self.find_max_recursive(node.left)
self.find_max_recursive(node.right)
return self.max_data
def find_max_level_order_traversal(self, node):
if node is not None:
q = Queue()
            q.put(node)  # enqueue the root node
while not q.empty():
node = q.get() # Dequeue FIFO
                # compare after dequeuing
if node.get_data() > self.max_data:
self.max_data = node.get_data()
if node.left is not None:
q.put(node.left)
if node.right is not None:
q.put(node.right)
return self.max_data
def find_data_recursive(self, node, data):
if node is None:
return 0
if node.get_data() == data:
return 1
elif data < node.get_data():
return self.find_data_recursive(node.left, data)
else:
return self.find_data_recursive(node.right, data)
def find_data_level_order_traversal(self, node, data):
if node is not None:
q = Queue()
            q.put(node)  # enqueue the root node
while not q.empty():
node = q.get() # Dequeue FIFO
                # check after dequeuing
if node.get_data() == data:
return 1
if node.left is not None:
q.put(node.left)
if node.right is not None:
q.put(node.right)
        # If 0 is returned, the data is not in the tree.
return 0
def insert_in_binary_using_tree_level_order(self, node, data):
new_node = Node(data)
if node is None:
            node = new_node  # if the tree is empty, make the new node the root node
return node
q = Queue()
        q.put(node)  # enqueue the root node
while not q.empty():
node = q.get() # Dequeue FIFO
            # check after dequeuing
if node.get_data() == data:
return "Already in tree"
if node.left is not None:
q.put(node.left)
else:
                # if it is smaller than the current node's data
if new_node.get_data() < node.get_data():
node.left = new_node
return "Inserted as left node"
if node.right is not None:
q.put(node.right)
else:
                # if it is greater than the current node's data
if new_node.get_data() > node.get_data():
node.right = new_node
return "Inserted as right node"
def find_tree_size_recursive(self, node):
if node is None:
return 0
return self.find_tree_size_recursive(node.left) + self.find_tree_size_recursive(node.right) + 1
def find_tree_size_iterative(self, node):
if node is None:
return 0
q = Queue()
        q.put(node)  # enqueue the root node
count = 0
while not q.empty():
node = q.get()
count = count + 1
if node.left is not None:
q.put(node.left)
if node.right is not None:
q.put(node.right)
return count
def level_order_traversal_in_reverse(self, node):
if node is None:
return 0
q = Queue()
        s = []  # list used as a LIFO stack
        q.put(node)  # enqueue the root node
while not q.empty():
node = q.get() # Dequeue FIFO
s.append(node.get_data()) # LIFO
if node.left is not None:
q.put(node.left)
if node.right is not None:
q.put(node.right)
while(s):
print(s.pop(), end=' ')
def delete_binary_tree(self, node):
if node is None:
return
self.delete_binary_tree(node.left)
self.delete_binary_tree(node.right)
node.data = None
node.left = None
node.right = None
self.root = None
def max_depth_recursive(self, node):
if node is None:
return 0
return max(self.max_depth_recursive(node.left), self.max_depth_recursive(node.right)) + 1
def max_depth_iterative(self, node):
if node is None:
return 0
q_list = []
q_list.append([node, 1])
while q_list:
            node, depth = q_list.pop()  # tuple unpacking in Python
            if node.left is not None:
                q_list.append([node.left, depth + 1])  # if there is a left child, push it with depth + 1
            if node.right is not None:
                q_list.append([node.right, depth + 1])  # if there is a right child, push it with depth + 1
return depth
def deepest_node(self, node):
if node is None:
return 0
q = Queue()
q.put(node)
while not q.empty():
node = q.get()
if node.left is not None:
q.put(node.left)
if node.right is not None:
q.put(node.right)
return node.get_data()
def number_of_leafs_iterative(self, node):
if node is None:
return 0
q = Queue()
q.put(node)
        count = 0  # counter
while not q.empty():
node = q.get()
if (node.left is None) and (node.right is None):
count = count + 1
else:
if node.left is not None:
q.put(node.left)
if node.right is not None:
q.put(node.right)
return count
def number_of_full_nodes_iterative(self, node):
if node is None:
return 0
q = Queue()
q.put(node)
        count = 0  # counter
        while not q.empty():
            node = q.get()
            if (node.left is not None) and (node.right is not None):
                count = count + 1
            # enqueue children unconditionally so full nodes deeper in the
            # tree are still visited
            if node.left is not None:
                q.put(node.left)
            if node.right is not None:
                q.put(node.right)
        return count
def number_of_half_nodes_iterative(self, node):
if node is None:
return 0
q = Queue()
q.put(node)
        count = 0  # counter
        while not q.empty():
            node = q.get()
            if (node.left is None and node.right is not None) or \
                    (node.right is None and node.left is not None):
                count = count + 1
            # enqueue the existing child unconditionally so half nodes deeper
            # in the tree are still visited
            if node.left is not None:
                q.put(node.left)
            if node.right is not None:
                q.put(node.right)
        return count
def check_tree_structures_to_be_same(self, node1, node2):
        # if neither node has any children left and their data are equal, return True
if (not node1.left) and \
(not node1.right) and \
(not node2.left) and \
(not node2.right) and node1.data == node2.data:
return True
        # below we detect that the two trees differ from each other
if (node1.data != node2.data) or (node1.left and not node2.left) or \
(not node1.left and node2.left) or (node1.right and not node2.right) or \
(not node1.right and node2.right):
return False
left = self.check_tree_structures_to_be_same(node1.left, node2.left) if node1.left and node2.left else True
right = self.check_tree_structures_to_be_same(node1.right, node2.right) if node1.right and node2.right else True
return left and right
def diameter_of_tree(self, node):
if node is None:
return 0
        # find the heights of the left and right subtrees
lheight = self.max_depth_recursive(node.left)
rheight = self.max_depth_recursive(node.right)
        # find the diameters of the left and right subtrees
ldiameter = self.diameter_of_tree(node.left)
rdiameter = self.diameter_of_tree(node.right)
        # We must return the maximum value for the tree:
        # take the sum of the left and right subtree heights plus 1,
        # take the maximum of the left and right subtree diameters,
        # and return the larger of those two.
return max(lheight + rheight + 1, max(ldiameter, rdiameter))
if __name__ == "__main__":
tree = BinaryTreeExercises()
arr = [8, 3, 1, 6, 4, 7, 10, 14, 13]
for i in arr:
tree.create_tree(i)
print("find_max_recursive() -> ", end='')
print(tree.find_max_recursive(tree.root))
print("find_max_level_order_traversal() -> ", end='')
print(tree.find_max_level_order_traversal(tree.root))
print("find_data_recursive() search 88 -> ", end='')
print(tree.find_data_recursive(tree.root, 88))
print("find_data_recursive() search 14 -> ", end='')
print(tree.find_data_recursive(tree.root, 14))
print("find_data_level_order_traversal() search 88 -> ", end='')
print(tree.find_data_level_order_traversal(tree.root, 88))
print("find_data_level_order_traversal() search 14 -> ", end='')
print(tree.find_data_level_order_traversal(tree.root, 14))
print("insert_in_binary_using_tree_level_order(tree.root, 21) -> ", end='')
print(tree.insert_in_binary_using_tree_level_order(tree.root, 21))
print("find_tree_size_recursive() -> ", end='')
print(tree.find_tree_size_recursive(tree.root))
print("find_tree_size_iterative() -> ", end='')
print(tree.find_tree_size_iterative(tree.root))
print("level_order_traversal_in_reverse() -> ", end='')
tree.level_order_traversal_in_reverse(tree.root)
# print("")
# print("delete_binary_tree() -> ")
# tree.delete_binary_tree(tree.root)
# print("find_tree_size_recursive() -> ", end='')
# print(tree.find_tree_size_recursive(tree.root))
# print("find_tree_size_iterative() -> ", end='')
# print(tree.find_tree_size_iterative(tree.root))
print("")
print("max_depth_recursive() -> ", end='')
print(tree.max_depth_recursive(tree.root))
print("max_depth_iterative() -> ", end='')
print(tree.max_depth_iterative(tree.root))
print("deepest_node() -> ", end='')
print(tree.deepest_node(tree.root))
print("number_of_leafs_iterative() -> ", end = '')
print(tree.number_of_leafs_iterative(tree.root))
print("number_of_full_nodes_iterative() -> ", end='')
print(tree.number_of_full_nodes_iterative(tree.root))
print("number_of_half_nodes_iterative() -> ", end='')
print(tree.number_of_half_nodes_iterative(tree.root))
tree2 = BinaryTreeExercises()
arr2 = [8, 3, 1, 6, 4, 7, 10, 14, 13]
for i in arr2:
tree2.create_tree(i)
print("insert_in_binary_using_tree_level_order(tree2.root, 21) -> ", end='')
print(tree.insert_in_binary_using_tree_level_order(tree2.root, 21))
print("check_tree_structures_to_be_same(tree.root, tree2.root) -> ", end='')
print(tree.check_tree_structures_to_be_same(tree.root, tree2.root))
print("insert_in_binary_using_tree_level_order(tree2.root, 88) -> ", end='')
print(tree.insert_in_binary_using_tree_level_order(tree2.root, 88))
print("check_tree_structures_to_be_same(tree.root, tree2.root) -> ", end='')
print(tree.check_tree_structures_to_be_same(tree.root, tree2.root))
print("diameter_of_tree() -> ", end='')
print(tree.diameter_of_tree(tree.root))
| mit | -3,437,561,034,992,447,500 | 32.867168 | 120 | 0.540591 | false |
eclee25/flu-SDI-exploratory-age | scripts/OR_subtype_vaxmatch_v6-16-13.py | 1 | 4864 | #!/usr/bin/python
##############################################
###Python template
###Author: Elizabeth Lee
###Date: June 16, 2013
###Function:
#### generate a metric that represents the potential interactive effect between the prominent subtype and the vax strain match for the prominent subtype
#### draw a plot of OR (y-axis) vs this interaction metric (x-axis)
#### same plot is represented in two ways -- labels are season number or prominent subtype(s)
### Updates from 6/4/14 version
#### 1) call ORgenerator_v060713
#### 2) change OR from normalization by US population to normalization by zip3 popstat
#### 3) change import data to zipcode_bysseas
###Import data: subtype.csv, zipcode_bysseas_cl_v6-12-13.csv
###Command Line: python OR_subtype_vaxmatch_v6-12-13.py
##############################################
### notes ###
# potential interactive effect: vax match and prominent subtypes. % isolates that are H1 * % H1 isolates that matched H1 vax strain = # H1 isolates/# isolates * # H1 matched isolates/# H1 isolates
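# worked example (hypothetical numbers): if 800 of 1000 isolates are H1 and 600 of those
# H1 isolates matched the H1 vax strain, the metric is (800/1000) * (600/800) = 600/1000 = 0.6,
# i.e. matched isolates over total isolates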
### packages ###
import matplotlib
import csv
import numpy as np
import matplotlib.pyplot as plt
from pylab import *
## local packages ##
import ORgenerator_v060713 as od # 6/16 add script
### data structures ###
# 6/16 add lists to correspond to new functions
child1, adult1, y1, z3s, snum_sdi = [],[],[],[],[] # attack rates for children and adults for all cases, odds ratios for all cases, zip3s in dataset; season number code in import dataset
avgOR1, sdOR1 = [],[] # average ORs across zip3s for each season, standard deviation of ORs for each season (dispersion of zip3 ORs around the mean)
seasonnum, match_iso, psubtypelab = [],[],[] # season number, # matched isolates for prominent subtypes/# total isolates, prominent subtype label
### parameters ###
# USchild = 20348657 + 20677194 + 22040343 #US child popn
# USadult = 21585999 + 21101849 + 19962099 + 20179642 + 20890964 + 22708591 + 22298125 + 19664805 #US adult popn
### functions ###
# 6/16 rm in-script functions for OR generation
def subtype_vaxmatch_import (csvreadfile, season, interact, s_label):
for row in csvreadfile:
H1i, H3i, Bi, TOTi = float(row[4]), float(row[5]), float(row[6]), float(row[7])
H1im, H3im, Bim, TOTim = float(row[8]), float(row[9]), float(row[10]), float(row[11])
season.append(int(row[0])) # season number
s_label.append(row[2])
val = int(row[3])
# subtype value determines how percentage will be calculated
if val == 1: # H1 matched isolates/# isolates
interact.append(H1im/TOTi)
elif val == 2: # H3 matched isolates/# isolates
interact.append(H3im/TOTi)
elif val == 5: # H1+B matched isolates/# isolates
interact.append((H1im+Bim)/TOTi)
elif val == 6: # H3+B matched isolates/# isolates
interact.append((H3im+Bim)/TOTi)
elif val == 7: # H1+H3+B matched isolates/# isolates
interact.append((H1im+H3im+Bim)/TOTi)
#print val, H1im, H3im, Bim, TOTi
### import data ###
d1in=open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/R_export/zipcode_bysseas_cl_v6-12-13.csv','r') # 6/16 change file name
d1=csv.reader(d1in, delimiter=',')
subtypein=open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/subtype4.csv','r')
subtype=csv.reader(subtypein, delimiter=',')
### program ###
# 6/16 change functions and variable names
od.importer_ORzip3(d1, adult1, child1, 3, 4, z3s, 2, snum_sdi)
od.ORgen_zip3mn(y1, child1, adult1, snum_sdi, avgOR1, sdOR1)
subtype_vaxmatch_import(subtype, seasonnum, match_iso, psubtypelab)
print match_iso
# plot OR vs # matched isolates of prominent subtypes that season / # total isolates (labels represent season num)
plt.errorbar(match_iso, avgOR1, yerr=sdOR1, marker='o', color = 'black', label= "all cases", linestyle='None')
for num, perc, OR in zip(seasonnum, match_iso, avgOR1):
plt.annotate(num, xy = (perc, OR), xytext = (-10,5), textcoords = 'offset points')
plt.ylabel('Odds ratio of attack rate, child:adult (zip3 popstat normalized)')
plt.xlabel('Matched isolates (prominent subtypes only)/ Total isolates')
plt.legend(loc="upper left")
ylim([2, 7]) # 6/16 add ylim bc error bars are very large
plt.show()
# same plot as above except labels are prominent subtype
plt.errorbar(match_iso, avgOR1, yerr=sdOR1, marker='o', color = 'black', label= "all cases", linestyle='None')
for lab, perc, OR in zip(psubtypelab, match_iso, avgOR1):
plt.annotate(lab, xy = (perc, OR), xytext = (-8,6), textcoords = 'offset points')
# plt.ylabel('Odds ratio of attack rate, child:adult (zip3 popstat normalized)')
plt.ylabel('OR, seasonal attack rate', fontsize=24)
# plt.xlabel('Matched isolates (prominent subtypes only)/ Total isolates')
plt.xlabel('Magnitude of Matched Isolates Among Prominent Subtypes', fontsize=24) # 12/1/13 presentation label
plt.legend(loc="upper left")
ylim([2, 7])
plt.show()
| mit | 2,004,143,897,823,384,000 | 37.912 | 196 | 0.701891 | false |
credp/lisa | external/workload-automation/wa/framework/exception.py | 3 | 4563 | # Copyright 2013-2018 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=unused-import
from devlib.exception import (DevlibError, HostError, TimeoutError, # pylint: disable=redefined-builtin
TargetError, TargetNotRespondingError)
from wa.utils.misc import get_traceback
class WAError(Exception):
"""Base class for all Workload Automation exceptions."""
@property
def message(self):
if self.args:
return self.args[0]
return ''
class NotFoundError(WAError):
"""Raised when the specified item is not found."""
class ValidationError(WAError):
"""Raised on failure to validate an extension."""
class ExecutionError(WAError):
"""Error encountered by the execution framework."""
class WorkloadError(WAError):
"""General Workload error."""
class JobError(WAError):
"""Job execution error."""
class InstrumentError(WAError):
"""General Instrument error."""
class OutputProcessorError(WAError):
"""General OutputProcessor error."""
class ResourceError(WAError):
"""General Resolver error."""
class CommandError(WAError):
"""Raised by commands when they have encountered an error condition
during execution."""
class ToolError(WAError):
"""Raised by tools when they have encountered an error condition
during execution."""
class ConfigError(WAError):
"""Raised when configuration provided is invalid. This error suggests that
the user should modify their config and try again."""
class SerializerSyntaxError(Exception):
"""
Error loading a serialized structure from/to a file handle.
"""
@property
def message(self):
if self.args:
return self.args[0]
return ''
def __init__(self, message, line=None, column=None):
super(SerializerSyntaxError, self).__init__(message)
self.line = line
self.column = column
def __str__(self):
linestring = ' on line {}'.format(self.line) if self.line else ''
colstring = ' in column {}'.format(self.column) if self.column else ''
message = 'Syntax Error{}: {}'
return message.format(''.join([linestring, colstring]), self.message)
class PluginLoaderError(WAError):
"""Raised when there is an error loading an extension or
an external resource. Apart form the usual message, the __init__
takes an exc_info parameter which should be the result of
sys.exc_info() for the original exception (if any) that
caused the error."""
def __init__(self, message, exc_info=None):
super(PluginLoaderError, self).__init__(message)
self.exc_info = exc_info
def __str__(self):
if self.exc_info:
orig = self.exc_info[1]
orig_name = type(orig).__name__
if isinstance(orig, WAError):
reason = 'because of:\n{}: {}'.format(orig_name, orig)
else:
text = 'because of:\n{}\n{}: {}'
reason = text.format(get_traceback(self.exc_info), orig_name, orig)
return '\n'.join([self.message, reason])
else:
return self.message
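# Illustrative sketch (not part of the original source; plugin_path is a
# placeholder): a loader would typically wrap the original failure like this:
#   try:
#       import_module(plugin_path)
#   except ImportError:
#       raise PluginLoaderError('Failed to load %s' % plugin_path, sys.exc_info())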
class WorkerThreadError(WAError):
"""
This should get raised in the main thread if a non-WAError-derived
exception occurs on a worker/background thread. If a WAError-derived
exception is raised in the worker, then it that exception should be
re-raised on the main thread directly -- the main point of this is to
preserve the backtrace in the output, and backtrace doesn't get output for
WAErrors.
"""
def __init__(self, thread, exc_info):
self.thread = thread
self.exc_info = exc_info
orig = self.exc_info[1]
orig_name = type(orig).__name__
text = 'Exception of type {} occured on thread {}:\n{}\n{}: {}'
message = text.format(orig_name, thread, get_traceback(self.exc_info),
orig_name, orig)
super(WorkerThreadError, self).__init__(message)
| apache-2.0 | 2,332,844,489,602,603,000 | 30.6875 | 104 | 0.653079 | false |
rbarrois/factory_boy | factory/faker.py | 1 | 2223 | # -*- coding: utf-8 -*-
# Copyright: See the LICENSE file.
"""Additional declarations for "faker" attributes.
Usage:
class MyFactory(factory.Factory):
class Meta:
model = MyProfile
first_name = factory.Faker('name')
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
import faker
import faker.config
from . import declarations
class Faker(declarations.BaseDeclaration):
"""Wrapper for 'faker' values.
Args:
provider (str): the name of the Faker field
locale (str): the locale to use for the faker
All other kwargs will be passed to the underlying provider
(e.g ``factory.Faker('ean', length=10)``
calls ``faker.Faker.ean(length=10)``)
Usage:
>>> foo = factory.Faker('name')
"""
def __init__(self, provider, **kwargs):
super(Faker, self).__init__()
self.provider = provider
self.provider_kwargs = kwargs
self.locale = kwargs.pop('locale', None)
def generate(self, extra_kwargs=None):
kwargs = {}
kwargs.update(self.provider_kwargs)
kwargs.update(extra_kwargs or {})
subfaker = self._get_faker(self.locale)
return subfaker.format(self.provider, **kwargs)
def evaluate(self, instance, step, extra):
return self.generate(extra)
_FAKER_REGISTRY = {}
_DEFAULT_LOCALE = faker.config.DEFAULT_LOCALE
@classmethod
@contextlib.contextmanager
def override_default_locale(cls, locale):
old_locale = cls._DEFAULT_LOCALE
cls._DEFAULT_LOCALE = locale
try:
yield
finally:
cls._DEFAULT_LOCALE = old_locale
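    # Illustrative sketch (not part of the original source; UserFactory is a
    # hypothetical factory that declares factory.Faker('name')): temporarily
    # switch every Faker declaration without an explicit locale, e.g.
    #   with Faker.override_default_locale('fr_FR'):
    #       UserFactory()  # 'name' fields come from the French provider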
@classmethod
def _get_faker(cls, locale=None):
if locale is None:
locale = cls._DEFAULT_LOCALE
if locale not in cls._FAKER_REGISTRY:
subfaker = faker.Faker(locale=locale)
cls._FAKER_REGISTRY[locale] = subfaker
return cls._FAKER_REGISTRY[locale]
@classmethod
def add_provider(cls, provider, locale=None):
"""Add a new Faker provider for the specified locale"""
cls._get_faker(locale).add_provider(provider)
| mit | 6,703,169,373,619,360,000 | 25.152941 | 66 | 0.622132 | false |
apinsard/qtile | libqtile/widget/countdown.py | 11 | 2364 | # Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 roger
# Copyright (c) 2014 Tycho Andersen
# Copyright (c) 2014 Adi Sieker
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from datetime import datetime
from . import base
class Countdown(base.InLoopPollText):
"""
A simple countdown timer text widget.
"""
orientations = base.ORIENTATION_HORIZONTAL
defaults = [
('format', '{D}d {H}h {M}m {S}s',
'Format of the displayed text. Available variables:'
'{D} == days, {H} == hours, {M} == minutes, {S} seconds.'),
('update_interval', 1., 'Update interval in seconds for the clock'),
        ('date', datetime.now(), "The datetime for the end of the countdown"),
]
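    # Illustrative usage sketch (assumed, not from the original source):
    #   widget.Countdown(date=datetime(2016, 12, 31),
    #                    format='{D}d {H}h {M}m {S}s')
    # counts down to the given datetime in the bar.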
def __init__(self, **config):
base.InLoopPollText.__init__(self, **config)
self.add_defaults(Countdown.defaults)
def poll(self):
now = datetime.now()
days = hours = minutes = seconds = 0
if not self.date < now:
delta = self.date - now
days = delta.days
hours, rem = divmod(delta.seconds, 3600)
minutes, seconds = divmod(rem, 60)
data = {"D": "%02d" % days,
"H": "%02d" % hours,
"M": "%02d" % minutes,
"S": "%02d" % seconds}
return self.format.format(**data)
| mit | -5,469,668,473,567,863,000 | 37.754098 | 79 | 0.653553 | false |
heimlich1024/OD_CopyPasteExternal | Blender/BLENDER_ExportToExternal.py | 2 | 4435 | bl_info = {
"name": "Copy To External",
"version": (1, 0),
"author": "Oliver Hotz",
"description": "Copies current object to clipboard for use in other applications / instances",
"category": "Object"
}
import bpy, tempfile, os
from mathutils import Vector
class CopyToExternal(bpy.types.Operator):
"""Object Cursor Array"""
bl_idname = "object.copy_to_external"
bl_label = "Copy To External"
bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
def OD_CopyToExternal(_name, size):
def mesh_to_weight_list(ob, me):
                # collect each vertex's weights from the object's vertex groups.
group_names = [g.name for g in ob.vertex_groups]
group_names_tot = len(group_names)
if not group_names_tot:
# no verts? return a vert aligned empty list
return [[] for i in range(len(me.vertices))], []
else:
weight_ls = [[0.0] * group_names_tot for i in range(len(me.vertices))]
for i, v in enumerate(me.vertices):
for g in v.groups:
# possible weights are out of range
index = g.group
if index < group_names_tot:
weight_ls[i][index] = g.weight
return group_names, weight_ls
# get active object
obj = bpy.context.active_object
mesh = obj.data
point_count = len(obj.data.vertices)
poly_count = len(obj.data.polygons)
file = tempfile.gettempdir() + os.sep + "ODVertexData.txt"
f = open(file, "w")
f.write ("VERTICES:" + str(point_count) + "\n")
#write Vertices
for vert in obj.data.vertices:
f.write(str(vert.co[0]) + " " + str(vert.co[2]) + " " + str(vert.co[1]*-1) + "\n")
#write polygons-point connection for poly reconstructions
f.write("POLYGONS:" + str(poly_count) + "\n")
for poly in obj.data.polygons:
surf = "Default"
if len(obj.material_slots)!= 0:
slot = obj.material_slots[poly.material_index]
surf = slot.name
ppoint = ""
polytype = "FACE"
for idx in poly.vertices:
ppoint += "," + str(obj.data.vertices[idx].index)
f.write(ppoint[1:] + ";;" + str(surf) + ";;" + polytype + "\n")
#write all weights
result1, result2 = mesh_to_weight_list(obj, mesh)
if len(result1[0]) > 0:
count = 0
for weight in result1:
f.write("WEIGHT:" + weight + "\n")
for r in result2:
f.write(str(r[count]) + "\n")
count += 1
#write all morphs
for keys in bpy.data.shape_keys:
for key in keys.key_blocks[1:]:
f.write("MORPH:" + key.name + "\n")
basis_verts = keys.key_blocks[0].data
for j, kv in enumerate(key.data):
delta = kv.co - basis_verts[j].co
f.write(str(delta[0]) + " " + str(delta[2]) + " " + str(delta[1]*-1) + "\n")
#UVs
for j, ul in enumerate(mesh.uv_layers):
uv = []
for poly in mesh.polygons:
for i in poly.loop_indices:
l = mesh.loops[i]
v = mesh.vertices[l.vertex_index]
uv.append([str(ul.data[l.index].uv[0]), str(ul.data[l.index].uv[1]), str(poly.index), str(l.vertex_index) ])
f.write("UV:" + ul.name + ":" + str(len(uv)) + "\n")
for entry in uv:
f.write(entry[0] + " " + entry[1] + ":PLY:" + entry[2] + ":PNT:" + entry[3] + "\n")
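        # Rough sketch of the ODVertexData.txt layout written above (values are
        # illustrative, not from a real export):
        #   VERTICES:4
        #   0.0 0.0 0.0
        #   ...
        #   POLYGONS:1
        #   0,1,2,3;;Default;;FACE
        #   followed by optional WEIGHT:<group>, MORPH:<shape key> and
        #   UV:<map>:<count> sections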
# call the function
new_mesh = OD_CopyToExternal('ODCopy', 1)
return {'FINISHED'}
def menu_func(self, context):
self.layout.operator(CopyToExternal.bl_idname)
def register():
bpy.utils.register_class(CopyToExternal)
bpy.types.VIEW3D_MT_object.append(menu_func)
def unregister():
bpy.utils.unregister_class(CopyToExternal)
bpy.types.VIEW3D_MT_object.remove(menu_func)
if __name__ == "__main__":
register() | apache-2.0 | 7,388,787,439,722,112,000 | 37.241379 | 132 | 0.488613 | false |
scottrice/pysteam | pysteam/grid.py | 3 | 2425 | # encoding: utf-8
import os
import shutil
import paths
VALID_EXTENSIONS = [
'.png',
'.jpg',
'.jpeg',
'.tga',
]
VALID_EXTENSIONS_WITHOUT_DOT = map(lambda ext: ext[1:], VALID_EXTENSIONS)
def is_valid_extension(extension):
  """Returns True if `extension` is a valid image extension to be used with
  custom Steam grid images. There are only 4 such extensions - `.png`, `.jpg`,
  `.jpeg`, and `.tga`.
  This function will return true even if the parameter `extension` does not
  include the leading '.'"""
  return extension in VALID_EXTENSIONS or \
extension in VALID_EXTENSIONS_WITHOUT_DOT
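# Illustrative examples (not part of the original source):
#   is_valid_extension('.png')  -> True
#   is_valid_extension('jpeg')  -> True   (leading dot optional)
#   is_valid_extension('.gif')  -> False  (not supported for Steam grid images)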
def _valid_custom_image_paths(user_context, app_id):
parent_dir = paths.custom_images_directory(user_context)
possible_filenames = map(lambda ext: str(app_id) + ext, VALID_EXTENSIONS)
return map(lambda f: os.path.join(parent_dir, f), possible_filenames)
def has_custom_image(user_context, app_id):
"""Returns True if there exists a custom image for app_id."""
possible_paths = _valid_custom_image_paths(user_context, app_id)
return any(map(os.path.exists, possible_paths))
def get_custom_image(user_context, app_id):
"""Returns the custom image associated with a given app. If there are
multiple candidate images on disk, one is chosen arbitrarily."""
possible_paths = _valid_custom_image_paths(user_context, app_id)
existing_images = filter(os.path.exists, possible_paths)
if len(existing_images) > 0:
return existing_images[0]
def set_custom_image(user_context, app_id, image_path):
"""Sets the custom image for `app_id` to be the image located at
`image_path`. If there already exists a custom image for `app_id` it will
  be deleted. Returns True if setting the image was successful."""
if image_path is None:
return False
if not os.path.exists(image_path):
return False
(root, ext) = os.path.splitext(image_path)
if not is_valid_extension(ext):
# TODO: Maybe log that this happened?
return False
# If we don't remove the old image then theres no guarantee that Steam will
# show our new image when it launches.
if has_custom_image(user_context, app_id):
img = get_custom_image(user_context, app_id)
assert(img is not None)
os.remove(img)
# Set the new image
parent_dir = paths.custom_images_directory(user_context)
new_path = os.path.join(parent_dir, app_id + ext)
shutil.copyfile(image_path, new_path)
return True | mit | 8,520,886,058,365,924,000 | 33.657143 | 78 | 0.710515 | false |
m-sanders/wagtail | wagtail/wagtailadmin/urls.py | 4 | 6431 | from django.conf.urls import url
from django.contrib.auth.decorators import permission_required
from django.contrib.auth import views as django_auth_views
from django.views.decorators.cache import cache_control
from wagtail.wagtailadmin.forms import PasswordResetForm
from wagtail.wagtailadmin.views import account, chooser, home, pages, tags, userbar, page_privacy
from wagtail.wagtailcore import hooks
from wagtail.utils.urlpatterns import decorate_urlpatterns
urlpatterns = [
url(r'^$', home.home, name='wagtailadmin_home'),
url(r'^failwhale/$', home.error_test, name='wagtailadmin_error_test'),
url(r'^explorer-nav/$', pages.explorer_nav, name='wagtailadmin_explorer_nav'),
url(r'^pages/$', pages.index, name='wagtailadmin_explore_root'),
url(r'^pages/(\d+)/$', pages.index, name='wagtailadmin_explore'),
url(r'^pages/new/(\w+)/(\w+)/(\d+)/$', pages.create, name='wagtailadmin_pages_create'),
url(r'^pages/new/(\w+)/(\w+)/(\d+)/preview/$', pages.preview_on_create, name='wagtailadmin_pages_preview_on_create'),
url(r'^pages/usage/(\w+)/(\w+)/$', pages.content_type_use, name='wagtailadmin_pages_type_use'),
url(r'^pages/(\d+)/edit/$', pages.edit, name='wagtailadmin_pages_edit'),
url(r'^pages/(\d+)/edit/preview/$', pages.preview_on_edit, name='wagtailadmin_pages_preview_on_edit'),
url(r'^pages/preview/$', pages.preview, name='wagtailadmin_pages_preview'),
url(r'^pages/preview_loading/$', pages.preview_loading, name='wagtailadmin_pages_preview_loading'),
url(r'^pages/(\d+)/view_draft/$', pages.view_draft, name='wagtailadmin_pages_view_draft'),
url(r'^pages/(\d+)/add_subpage/$', pages.add_subpage, name='wagtailadmin_pages_add_subpage'),
url(r'^pages/(\d+)/delete/$', pages.delete, name='wagtailadmin_pages_delete'),
url(r'^pages/(\d+)/unpublish/$', pages.unpublish, name='wagtailadmin_pages_unpublish'),
url(r'^pages/search/$', pages.search, name='wagtailadmin_pages_search'),
url(r'^pages/(\d+)/move/$', pages.move_choose_destination, name='wagtailadmin_pages_move'),
url(r'^pages/(\d+)/move/(\d+)/$', pages.move_choose_destination, name='wagtailadmin_pages_move_choose_destination'),
url(r'^pages/(\d+)/move/(\d+)/confirm/$', pages.move_confirm, name='wagtailadmin_pages_move_confirm'),
url(r'^pages/(\d+)/set_position/$', pages.set_page_position, name='wagtailadmin_pages_set_page_position'),
url(r'^pages/(\d+)/copy/$', pages.copy, name='wagtailadmin_pages_copy'),
url(r'^pages/moderation/(\d+)/approve/$', pages.approve_moderation, name='wagtailadmin_pages_approve_moderation'),
url(r'^pages/moderation/(\d+)/reject/$', pages.reject_moderation, name='wagtailadmin_pages_reject_moderation'),
url(r'^pages/moderation/(\d+)/preview/$', pages.preview_for_moderation, name='wagtailadmin_pages_preview_for_moderation'),
url(r'^pages/(\d+)/privacy/$', page_privacy.set_privacy, name='wagtailadmin_pages_set_privacy'),
url(r'^pages/(\d+)/lock/$', pages.lock, name='wagtailadmin_pages_lock'),
url(r'^pages/(\d+)/unlock/$', pages.unlock, name='wagtailadmin_pages_unlock'),
url(r'^choose-page/$', chooser.browse, name='wagtailadmin_choose_page'),
url(r'^choose-page/(\d+)/$', chooser.browse, name='wagtailadmin_choose_page_child'),
url(r'^choose-external-link/$', chooser.external_link, name='wagtailadmin_choose_page_external_link'),
url(r'^choose-email-link/$', chooser.email_link, name='wagtailadmin_choose_page_email_link'),
url(r'^tag-autocomplete/$', tags.autocomplete, name='wagtailadmin_tag_autocomplete'),
url(r'^account/$', account.account, name='wagtailadmin_account'),
url(r'^account/change_password/$', account.change_password, name='wagtailadmin_account_change_password'),
url(r'^account/notification_preferences/$', account.notification_preferences, name='wagtailadmin_account_notification_preferences'),
url(r'^logout/$', account.logout, name='wagtailadmin_logout'),
]
# Import additional urlpatterns from any apps that define a register_admin_urls hook
for fn in hooks.get_hooks('register_admin_urls'):
urls = fn()
if urls:
urlpatterns += urls
# Add "wagtailadmin.access_admin" permission check
urlpatterns = decorate_urlpatterns(urlpatterns,
permission_required(
'wagtailadmin.access_admin',
login_url='wagtailadmin_login'
)
)
# These url patterns do not require an authenticated admin user
urlpatterns += [
url(r'^login/$', account.login, name='wagtailadmin_login'),
# These two URLs have the "permission_required" decorator applied directly
# as they need to fail with a 403 error rather than redirect to the login page
url(r'^userbar/(\d+)/$', userbar.for_frontend, name='wagtailadmin_userbar_frontend'),
url(r'^userbar/moderation/(\d+)/$', userbar.for_moderation, name='wagtailadmin_userbar_moderation'),
# Password reset
url(
r'^password_reset/$', django_auth_views.password_reset, {
'template_name': 'wagtailadmin/account/password_reset/form.html',
'email_template_name': 'wagtailadmin/account/password_reset/email.txt',
'subject_template_name': 'wagtailadmin/account/password_reset/email_subject.txt',
'password_reset_form': PasswordResetForm,
'post_reset_redirect': 'wagtailadmin_password_reset_done',
}, name='wagtailadmin_password_reset'
),
url(
r'^password_reset/done/$', django_auth_views.password_reset_done, {
'template_name': 'wagtailadmin/account/password_reset/done.html'
}, name='wagtailadmin_password_reset_done'
),
url(
r'^password_reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
django_auth_views.password_reset_confirm, {
'template_name': 'wagtailadmin/account/password_reset/confirm.html',
'post_reset_redirect': 'wagtailadmin_password_reset_complete',
}, name='wagtailadmin_password_reset_confirm',
),
url(
r'^password_reset/complete/$', django_auth_views.password_reset_complete, {
'template_name': 'wagtailadmin/account/password_reset/complete.html'
}, name='wagtailadmin_password_reset_complete'
),
]
# Decorate all views with cache settings to prevent caching
urlpatterns = decorate_urlpatterns(urlpatterns,
cache_control(private=True, no_cache=True, no_store=True, max_age=0)
)
| bsd-3-clause | 3,761,206,188,576,800,300 | 50.039683 | 136 | 0.689006 | false |
sysalexis/kbengine | kbe/src/lib/python/Lib/ast.py | 91 | 12034 | """
ast
~~~
The `ast` module helps Python applications to process trees of the Python
abstract syntax grammar. The abstract syntax itself might change with
each Python release; this module helps to find out programmatically what
the current grammar looks like and allows modifications of it.
An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
a flag to the `compile()` builtin function or by using the `parse()`
function from this module. The result will be a tree of objects whose
classes all inherit from `ast.AST`.
A modified abstract syntax tree can be compiled into a Python code object
using the built-in `compile()` function.
Additionally various helper functions are provided that make working with
the trees simpler. The main intention of the helper functions and this
module in general is to provide an easy to use interface for libraries
that work tightly with the python syntax (template engines for example).
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
from _ast import *
def parse(source, filename='<unknown>', mode='exec'):
"""
Parse the source into an AST node.
Equivalent to compile(source, filename, mode, PyCF_ONLY_AST).
"""
return compile(source, filename, mode, PyCF_ONLY_AST)
def literal_eval(node_or_string):
"""
Safely evaluate an expression node or a string containing a Python
expression. The string or node provided may only consist of the following
Python literal structures: strings, bytes, numbers, tuples, lists, dicts,
sets, booleans, and None.
"""
if isinstance(node_or_string, str):
node_or_string = parse(node_or_string, mode='eval')
if isinstance(node_or_string, Expression):
node_or_string = node_or_string.body
def _convert(node):
if isinstance(node, (Str, Bytes)):
return node.s
elif isinstance(node, Num):
return node.n
elif isinstance(node, Tuple):
return tuple(map(_convert, node.elts))
elif isinstance(node, List):
return list(map(_convert, node.elts))
elif isinstance(node, Set):
return set(map(_convert, node.elts))
elif isinstance(node, Dict):
return dict((_convert(k), _convert(v)) for k, v
in zip(node.keys, node.values))
elif isinstance(node, NameConstant):
return node.value
elif isinstance(node, UnaryOp) and \
isinstance(node.op, (UAdd, USub)) and \
isinstance(node.operand, (Num, UnaryOp, BinOp)):
operand = _convert(node.operand)
if isinstance(node.op, UAdd):
return + operand
else:
return - operand
elif isinstance(node, BinOp) and \
isinstance(node.op, (Add, Sub)) and \
isinstance(node.right, (Num, UnaryOp, BinOp)) and \
isinstance(node.left, (Num, UnaryOp, BinOp)):
left = _convert(node.left)
right = _convert(node.right)
if isinstance(node.op, Add):
return left + right
else:
return left - right
raise ValueError('malformed node or string: ' + repr(node))
return _convert(node_or_string)
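# Illustrative usage of literal_eval (not part of the original module); both a
# source string and an already parsed node are accepted:
#
#     literal_eval("{'a': 1, 'b': (2, 3)}")          # -> {'a': 1, 'b': (2, 3)}
#     literal_eval(parse('[-1, +2]', mode='eval'))   # -> [-1, 2]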
def dump(node, annotate_fields=True, include_attributes=False):
"""
Return a formatted dump of the tree in *node*. This is mainly useful for
debugging purposes. The returned string will show the names and the values
for fields. This makes the code impossible to evaluate, so if evaluation is
wanted *annotate_fields* must be set to False. Attributes such as line
numbers and column offsets are not dumped by default. If this is wanted,
*include_attributes* can be set to True.
"""
def _format(node):
if isinstance(node, AST):
fields = [(a, _format(b)) for a, b in iter_fields(node)]
rv = '%s(%s' % (node.__class__.__name__, ', '.join(
('%s=%s' % field for field in fields)
if annotate_fields else
(b for a, b in fields)
))
if include_attributes and node._attributes:
rv += fields and ', ' or ' '
rv += ', '.join('%s=%s' % (a, _format(getattr(node, a)))
for a in node._attributes)
return rv + ')'
elif isinstance(node, list):
return '[%s]' % ', '.join(_format(x) for x in node)
return repr(node)
if not isinstance(node, AST):
raise TypeError('expected AST, got %r' % node.__class__.__name__)
return _format(node)
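# Illustrative usage of dump (not part of the original module; the exact output
# shape is assumed for this Python version):
#
#     dump(parse('x = 1'))
#     # "Module(body=[Assign(targets=[Name(id='x', ctx=Store())], value=Num(n=1))])"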
def copy_location(new_node, old_node):
"""
Copy source location (`lineno` and `col_offset` attributes) from
*old_node* to *new_node* if possible, and return *new_node*.
"""
for attr in 'lineno', 'col_offset':
if attr in old_node._attributes and attr in new_node._attributes \
and hasattr(old_node, attr):
setattr(new_node, attr, getattr(old_node, attr))
return new_node
def fix_missing_locations(node):
"""
When you compile a node tree with compile(), the compiler expects lineno and
col_offset attributes for every node that supports them. This is rather
tedious to fill in for generated nodes, so this helper adds these attributes
recursively where not already set, by setting them to the values of the
parent node. It works recursively starting at *node*.
"""
def _fix(node, lineno, col_offset):
if 'lineno' in node._attributes:
if not hasattr(node, 'lineno'):
node.lineno = lineno
else:
lineno = node.lineno
if 'col_offset' in node._attributes:
if not hasattr(node, 'col_offset'):
node.col_offset = col_offset
else:
col_offset = node.col_offset
for child in iter_child_nodes(node):
_fix(child, lineno, col_offset)
_fix(node, 1, 0)
return node
def increment_lineno(node, n=1):
"""
Increment the line number of each node in the tree starting at *node* by *n*.
This is useful to "move code" to a different location in a file.
"""
for child in walk(node):
if 'lineno' in child._attributes:
child.lineno = getattr(child, 'lineno', 0) + n
return node
def iter_fields(node):
"""
Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields``
that is present on *node*.
"""
for field in node._fields:
try:
yield field, getattr(node, field)
except AttributeError:
pass
def iter_child_nodes(node):
"""
Yield all direct child nodes of *node*, that is, all fields that are nodes
and all items of fields that are lists of nodes.
"""
for name, field in iter_fields(node):
if isinstance(field, AST):
yield field
elif isinstance(field, list):
for item in field:
if isinstance(item, AST):
yield item
def get_docstring(node, clean=True):
"""
Return the docstring for the given node or None if no docstring can
be found. If the node provided does not have docstrings a TypeError
will be raised.
"""
if not isinstance(node, (FunctionDef, ClassDef, Module)):
raise TypeError("%r can't have docstrings" % node.__class__.__name__)
if node.body and isinstance(node.body[0], Expr) and \
isinstance(node.body[0].value, Str):
if clean:
import inspect
return inspect.cleandoc(node.body[0].value.s)
return node.body[0].value.s
def walk(node):
"""
Recursively yield all descendant nodes in the tree starting at *node*
(including *node* itself), in no specified order. This is useful if you
only want to modify nodes in place and don't care about the context.
"""
from collections import deque
todo = deque([node])
while todo:
node = todo.popleft()
todo.extend(iter_child_nodes(node))
yield node
class NodeVisitor(object):
"""
A node visitor base class that walks the abstract syntax tree and calls a
visitor function for every node found. This function may return a value
which is forwarded by the `visit` method.
This class is meant to be subclassed, with the subclass adding visitor
methods.
    By default the visitor functions for the nodes are ``'visit_'`` +
class name of the node. So a `TryFinally` node visit function would
be `visit_TryFinally`. This behavior can be changed by overriding
the `visit` method. If no visitor function exists for a node
(return value `None`) the `generic_visit` visitor is used instead.
Don't use the `NodeVisitor` if you want to apply changes to nodes during
traversing. For this a special visitor exists (`NodeTransformer`) that
allows modifications.
"""
def visit(self, node):
"""Visit a node."""
method = 'visit_' + node.__class__.__name__
visitor = getattr(self, method, self.generic_visit)
return visitor(node)
def generic_visit(self, node):
"""Called if no explicit visitor function exists for a node."""
for field, value in iter_fields(node):
if isinstance(value, list):
for item in value:
if isinstance(item, AST):
self.visit(item)
elif isinstance(value, AST):
self.visit(value)
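# Illustrative NodeVisitor subclass (not part of the original module): collects
# every referenced name, relying only on the visit_<ClassName> dispatch
# described in the class docstring above.
#
#     class NameCollector(NodeVisitor):
#         def __init__(self):
#             self.names = []
#         def visit_Name(self, node):
#             self.names.append(node.id)
#             self.generic_visit(node)
#
#     collector = NameCollector()
#     collector.visit(parse('x = y + z'))
#     # collector.names == ['x', 'y', 'z']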
class NodeTransformer(NodeVisitor):
"""
A :class:`NodeVisitor` subclass that walks the abstract syntax tree and
allows modification of nodes.
The `NodeTransformer` will walk the AST and use the return value of the
visitor methods to replace or remove the old node. If the return value of
the visitor method is ``None``, the node will be removed from its location,
otherwise it is replaced with the return value. The return value may be the
original node in which case no replacement takes place.
Here is an example transformer that rewrites all occurrences of name lookups
(``foo``) to ``data['foo']``::
class RewriteName(NodeTransformer):
def visit_Name(self, node):
return copy_location(Subscript(
value=Name(id='data', ctx=Load()),
slice=Index(value=Str(s=node.id)),
ctx=node.ctx
), node)
Keep in mind that if the node you're operating on has child nodes you must
either transform the child nodes yourself or call the :meth:`generic_visit`
method for the node first.
For nodes that were part of a collection of statements (that applies to all
statement nodes), the visitor may also return a list of nodes rather than
just a single node.
Usually you use the transformer like this::
node = YourTransformer().visit(node)
"""
def generic_visit(self, node):
for field, old_value in iter_fields(node):
old_value = getattr(node, field, None)
if isinstance(old_value, list):
new_values = []
for value in old_value:
if isinstance(value, AST):
value = self.visit(value)
if value is None:
continue
elif not isinstance(value, AST):
new_values.extend(value)
continue
new_values.append(value)
old_value[:] = new_values
elif isinstance(old_value, AST):
new_node = self.visit(old_value)
if new_node is None:
delattr(node, field)
else:
setattr(node, field, new_node)
return node
| lgpl-3.0 | 6,038,490,974,220,331,000 | 37.203175 | 81 | 0.60703 | false |
purisc-group/purisc-backend | class_def/conversions/terminator.py | 1 | 4842 | import re
import sys  # used by call() below for sys.exit()
from helpers import subleq
from helpers import next_subleq
from helpers import clear
def branchParseArgs(argStr):
args = [];
condition = re.findall("(?<=i1\s)[^,]*(?=,)",argStr);
if len(condition) > 0:
args.append(condition[0]);
args.append(re.findall("(?<=label\s)[^,]*(?=,)",argStr)[0]);
args.append(re.findall("(?<=label\s)[^,]*$",argStr)[0]);
else:
args.append(re.findall("(?<=label\s).*",argStr)[0])
return args
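# Illustrative note (not part of the original source): subleq(x, y, z) is read
# here as the usual single-instruction primitive "y -= x; branch to z if the
# result is <= 0". Under that reading, the conditional form of branch() below
# uses the -1 and -2 constants to build the tests "1 - a <= 0" and "a + 1 <= 0",
# so control reaches the true label whenever the i1 condition a is non-zero and
# otherwise falls through to the unconditional jump to the false label.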
def branch(instr, assem):
#branch can take two forms: unconditional branch and a conditional branch
t0 = "t" + str(assem.stackCount);
assem.stackCount += 1;
t1 = "t" + str(assem.stackCount);
assem.stackCount += 1;
if len(instr.args) == 1:
#unconditional branch, the argument is actually a label
assem.progMem.append(subleq(t0,t0,instr.args[0]));
else:
#conditional branch
a = instr.args[0];
b = instr.args[1];
c = instr.args[2];
notPos = assem.getNextReserved("notPos");
assem.progMem.append("\n// " + instr.raw);
assem.subleq(t0,t0,"NEXT");
assem.subleq(-1,t0,"NEXT");
assem.subleq(a,t0,b);
assem.subleq(t1,t1,"NEXT");
assem.subleq(t0,t1,"NEXT");
assem.subleq(-2,t1,b);
assem.subleq(t0,t0,c);
assem.dataMem[-1] = -1;
assem.dataMem[-2] = -2;
def labelParseArgs(argStr):
label = re.findall("\d+",argStr)[0];
return ["%" + str(label)]
def label(instr, assem):
assem.progMem.append("\n// " + instr.raw);
assem.progMem.append(instr.args[0] + ":");
def returnParseArgs(argStr):
arg = re.findall("(?<=i32)\s+\S+|void",argStr)[0];
if arg == "void":
arg = "__VOID__";
return [arg];
def returnF(instr, assem):
ret = instr.args[0];
t0 = assem.getNextTemp();
assem.progMem.append("\n// " + instr.raw)
if ret != "__VOID__":
assem.subleq("return", "return", "NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(ret,t0,"NEXT");
assem.subleq(t0,"return","NEXT");
assem.subleq(t0,t0,"#-1");
def callParseArgs(argStr):
print argStr
name = re.findall("(?<=i\d\d)\s+\S+(?=\()",argStr)[0].strip();
argsRaw = re.findall("(?<=\().*(?=\))",argStr)[0].strip();
args = argsRaw.split(",");
for i in range(0,len(args)):
args[i] = re.sub("i\d\d\s+","",args[i]).strip();
return [name] + args;
def call(instr, assem):
name = instr.args[0];
args = instr.args[1:];
if not name in builtInFunction:
print "error - attempting to call non-built in function, don't support functions...yet"
sys.exit(2);
builtInFunction[name](instr, assem);
def getGlobalId(instr, assem):
result = instr.result;
dim = instr.args[1]; #args[0] contains the function name
#add the literal to the data memory if necessary
if re.match("-?\d+",dim.strip()):
dim = dim.strip();
assem.dataMem[dim] = dim;
t0 = assem.getNextTemp();
t1 = assem.getNextTemp();
globIds = "glob_ids";
glob_0 = assem.getNextReserved("glob_0");
work_dim = "work_dims";
error = assem.getNextReserved("dim_error");
finish = assem.getNextReserved("finish");
continue0 = assem.getNextReserved("continue");
continue1 = assem.getNextReserved("continue");
assem.progMem.append("\n// " + instr.raw);
#check input is between 0 and work_dim() - 1
assem.subleq(t0,t0,"NEXT");
assem.subleq(dim,t0,continue0);
assem.subleq(t0,t0,error);
assem.subleq(continue0 + ":1",t0,"NEXT");
assem.subleq(t1,t1,"NEXT");
assem.subleq(t0,t1,"NEXT");
assem.subleq(work_dim,t1,continue1);
assem.subleq(t0,t0,error);
#get pointer value to the global id you want
assem.subleq(continue1 + ":" + t0,t0,"NEXT");
assem.subleq(globIds,t0,"NEXT");
assem.subleq(dim,t0,"NEXT"); #make t0 = -globIds - dim so we don't have to flip it twice below
#rewrite the instructions with the right global address
assem.subleq(glob_0,glob_0,"NEXT");
assem.subleq(t0,glob_0,"NEXT");
#store the current index value in the result
assem.subleq(result,result,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(glob_0 + ":#1",t0,"NEXT");
assem.subleq(t0,result,"NEXT");
assem.subleq(t0,t0,finish);
assem.subleq(error + ":" + result,result,"NEXT"); #return 0 in the case of invalid input ( < 0, > dim-1)
assem.subleq(finish + ":" + t0,t0,"NEXT");
assem.dataMem["1"] = 1;
def getWorkDim(instr, assem):
result = instr.result;
t0 = assem.getNextReserved();
assem.subleq(result,result,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq("work_dims",t0,"NEXT");
assem.subleq(t0,result,"NEXT");
builtInFunction = {
"@get_global_id" : getGlobalId,
"@get_work_dim" : getWorkDim
}
| gpl-2.0 | -4,045,357,002,096,055,000 | 28.168675 | 108 | 0.596448 | false |
ppaez/flyinghigh-opengl-from-python | flyinghigh/geometry/tests/testOrientation.py | 2 | 3360 |
try:
# Python 2.6 with unittest2 installed
from unittest2 import TestCase, main
except:
# Python 2.7
from unittest import TestCase, main
from math import pi
from ... import gl
from ..vec3 import NegXAxis, NegYAxis, NegZAxis, Vec3, XAxis, YAxis, ZAxis
from ..orientation import Orientation
class testOrientation(TestCase):
def testConstructionDefaults(self):
o = Orientation()
self.assertEqual(o.forward, NegZAxis)
self.assertEqual(o.up, YAxis)
self.assertEqual(o.right, XAxis)
def testConstructionConvertsBareTuples(self):
o = Orientation(XAxis, ZAxis)
self.assertEquals(o.forward, XAxis)
self.assertTrue(isinstance(o.forward, Vec3))
self.assertEquals(o.up, ZAxis)
self.assertTrue(isinstance(o.up, Vec3))
self.assertEquals(o.right, NegYAxis)
self.assertTrue(isinstance(o.right, Vec3))
def testConstructionNormalises(self):
o = Orientation((1, 2, 3))
self.assertAlmostEquals(o.forward.length, 1, places=15)
self.assertAlmostEquals(o.up.length, 1, places=15)
self.assertAlmostEquals(o.right.length, 1, places=15)
def testConstructionBarfsOnNonOrthogonalVectors(self):
self.assertRaises(AssertionError,
lambda: Orientation((1, 2, 3), (3, -2, 1)))
def testConstructionProvidesDefaultUp(self):
self.assertEqual(Orientation(XAxis).up, YAxis)
self.assertEqual(Orientation(YAxis).up, ZAxis)
self.assertEqual(Orientation(NegYAxis).up, NegZAxis)
def testStr(self):
self.assertEqual(str(Orientation(XAxis, up=YAxis)),
'Orientation(Vec3(1, 0, 0), up=Vec3(0, 1, 0))')
def testEqual(self):
a = Orientation((0, 2, 3))
self.assertTrue(a == Orientation((0, 2, 3)))
self.assertFalse(a == Orientation((11, 2, 3)))
self.assertFalse(a == Orientation((0, 2, 3), up=(0, -3, 2)))
self.assertFalse(a == 123)
def testNotEqual(self):
a = Orientation((0, 2, 3))
self.assertFalse(a != Orientation((0, 2, 3)))
self.assertTrue(a != Orientation((11, 2, 3)))
self.assertTrue(a != Orientation((0, 2, 3), up=(0, -3, 2)))
self.assertTrue(a != 123)
def testHash(self):
a = Orientation((0, 2, 3))
self.assertRaises(TypeError, lambda: hash(a))
def testRoll(self):
o = Orientation(ZAxis)
o.roll(pi/2)
self.assertEqual(o, Orientation(ZAxis, up=NegXAxis))
def testYaw(self):
o = Orientation(ZAxis)
o.yaw(pi/2)
self.assertEqual(o, Orientation(NegXAxis))
def testPitch(self):
o = Orientation(ZAxis)
o.pitch(pi/2)
self.assertEqual(o, Orientation(YAxis, up=NegZAxis))
def testMatrix(self):
o = Orientation((1, 2, 3))
self.assertEquals(type(o.matrix), gl.GLfloat * 16)
expected = [
o.right.x, o.right.y, o.right.z, 0,
o.up.x, o.up.y, o.up.z, 0,
-o.forward.x, -o.forward.y, -o.forward.z, 0,
0, 0, 0, 1,
]
for a, e in zip(o.matrix, expected):
self.assertAlmostEqual(a, e)
if __name__ == '__main__':
main()
| bsd-3-clause | 6,313,667,862,939,793,000 | 31.267327 | 74 | 0.579464 | false |
mgax/beancount | lib/python/beancount/fallback/elementtree/SimpleXMLWriter.py | 21 | 8616 | #
# SimpleXMLWriter
# $Id: SimpleXMLWriter.py 3265 2007-09-06 20:42:00Z fredrik $
#
# a simple XML writer
#
# history:
# 2001-12-28 fl created
# 2002-11-25 fl fixed attribute encoding
# 2002-12-02 fl minor fixes for 1.5.2
# 2004-06-17 fl added pythondoc markup
# 2004-07-23 fl added flush method (from Jay Graves)
# 2004-10-03 fl added declaration method
#
# Copyright (c) 2001-2004 by Fredrik Lundh
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The SimpleXMLWriter module is
#
# Copyright (c) 2001-2004 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Tools to write XML files, without having to deal with encoding
# issues, well-formedness, etc.
# <p>
# The current version does not provide built-in support for
# namespaces. To create files using namespaces, you have to provide
# "xmlns" attributes and explicitly add prefixes to tags and
# attributes.
#
# <h3>Patterns</h3>
#
# The following example generates a small XHTML document.
# <pre>
#
# from elementtree.SimpleXMLWriter import XMLWriter
# import sys
#
# w = XMLWriter(sys.stdout)
#
# html = w.start("html")
#
# w.start("head")
# w.element("title", "my document")
# w.element("meta", name="generator", value="my application 1.0")
# w.end()
#
# w.start("body")
# w.element("h1", "this is a heading")
# w.element("p", "this is a paragraph")
#
# w.start("p")
# w.data("this is ")
# w.element("b", "bold")
# w.data(" and ")
# w.element("i", "italic")
# w.data(".")
# w.end("p")
#
# w.close(html)
# </pre>
##
import re, sys, string
try:
unicode("")
except NameError:
def encode(s, encoding):
# 1.5.2: application must use the right encoding
return s
_escape = re.compile(r"[&<>\"\x80-\xff]+") # 1.5.2
else:
def encode(s, encoding):
return s.encode(encoding)
_escape = re.compile(eval(r'u"[&<>\"\u0080-\uffff]+"'))
def encode_entity(text, pattern=_escape):
# map reserved and non-ascii characters to numerical entities
def escape_entities(m):
out = []
for char in m.group():
out.append("&#%d;" % ord(char))
return string.join(out, "")
return encode(pattern.sub(escape_entities, text), "ascii")
del _escape
#
# the following functions assume an ascii-compatible encoding
# (or "utf-16")
def escape_cdata(s, encoding=None, replace=string.replace):
s = replace(s, "&", "&")
s = replace(s, "<", "<")
s = replace(s, ">", ">")
if encoding:
try:
return encode(s, encoding)
except UnicodeError:
return encode_entity(s)
return s
def escape_attrib(s, encoding=None, replace=string.replace):
s = replace(s, "&", "&")
s = replace(s, "'", "'")
s = replace(s, "\"", """)
s = replace(s, "<", "<")
s = replace(s, ">", ">")
if encoding:
try:
return encode(s, encoding)
except UnicodeError:
return encode_entity(s)
return s
##
# XML writer class.
#
# @param file A file or file-like object. This object must implement
# a <b>write</b> method that takes an 8-bit string.
# @param encoding Optional encoding.
class XMLWriter:
def __init__(self, file, encoding="us-ascii"):
if not hasattr(file, "write"):
file = open(file, "w")
self.__write = file.write
if hasattr(file, "flush"):
self.flush = file.flush
self.__open = 0 # true if start tag is open
self.__tags = []
self.__data = []
self.__encoding = encoding
def __flush(self):
# flush internal buffers
if self.__open:
self.__write(">")
self.__open = 0
if self.__data:
data = string.join(self.__data, "")
self.__write(escape_cdata(data, self.__encoding))
self.__data = []
##
# Writes an XML declaration.
def declaration(self):
encoding = self.__encoding
if encoding == "us-ascii" or encoding == "utf-8":
self.__write("<?xml version='1.0'?>\n")
else:
self.__write("<?xml version='1.0' encoding='%s'?>\n" % encoding)
##
# Opens a new element. Attributes can be given as keyword
# arguments, or as a string/string dictionary. You can pass in
# 8-bit strings or Unicode strings; the former are assumed to use
# the encoding passed to the constructor. The method returns an
# opaque identifier that can be passed to the <b>close</b> method,
# to close all open elements up to and including this one.
#
# @param tag Element tag.
# @param attrib Attribute dictionary. Alternatively, attributes
# can be given as keyword arguments.
# @return An element identifier.
def start(self, tag, attrib={}, **extra):
self.__flush()
tag = escape_cdata(tag, self.__encoding)
self.__data = []
self.__tags.append(tag)
self.__write("<%s" % tag)
if attrib or extra:
attrib = attrib.copy()
attrib.update(extra)
attrib = attrib.items()
attrib.sort()
for k, v in attrib:
k = escape_cdata(k, self.__encoding)
v = escape_attrib(v, self.__encoding)
self.__write(" %s=\"%s\"" % (k, v))
self.__open = 1
return len(self.__tags)-1
##
# Adds a comment to the output stream.
#
# @param comment Comment text, as an 8-bit string or Unicode string.
def comment(self, comment):
self.__flush()
self.__write("<!-- %s -->\n" % escape_cdata(comment, self.__encoding))
##
# Adds character data to the output stream.
#
# @param text Character data, as an 8-bit string or Unicode string.
def data(self, text):
self.__data.append(text)
##
# Closes the current element (opened by the most recent call to
# <b>start</b>).
#
# @param tag Element tag. If given, the tag must match the start
# tag. If omitted, the current element is closed.
def end(self, tag=None):
if tag:
assert self.__tags, "unbalanced end(%s)" % tag
assert escape_cdata(tag, self.__encoding) == self.__tags[-1],\
"expected end(%s), got %s" % (self.__tags[-1], tag)
else:
assert self.__tags, "unbalanced end()"
tag = self.__tags.pop()
if self.__data:
self.__flush()
elif self.__open:
self.__open = 0
self.__write(" />")
return
self.__write("</%s>" % tag)
##
# Closes open elements, up to (and including) the element identified
# by the given identifier.
#
# @param id Element identifier, as returned by the <b>start</b> method.
def close(self, id):
while len(self.__tags) > id:
self.end()
##
# Adds an entire element. This is the same as calling <b>start</b>,
# <b>data</b>, and <b>end</b> in sequence. The <b>text</b> argument
# can be omitted.
def element(self, tag, text=None, attrib={}, **extra):
apply(self.start, (tag, attrib), extra)
if text:
self.data(text)
self.end()
##
# Flushes the output stream.
def flush(self):
pass # replaced by the constructor
| gpl-2.0 | 5,425,764,246,910,292,000 | 29.88172 | 78 | 0.59552 | false |
pztrick/django-allauth | allauth/socialaccount/providers/github/views.py | 8 | 2024 | import requests
from allauth.socialaccount import app_settings
from allauth.socialaccount.providers.github.provider import GitHubProvider
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
class GitHubOAuth2Adapter(OAuth2Adapter):
provider_id = GitHubProvider.id
settings = app_settings.PROVIDERS.get(provider_id, {})
if 'GITHUB_URL' in settings:
web_url = settings.get('GITHUB_URL').rstrip('/')
api_url = '{0}/api/v3'.format(web_url)
else:
web_url = 'https://github.com'
api_url = 'https://api.github.com'
access_token_url = '{0}/login/oauth/access_token'.format(web_url)
authorize_url = '{0}/login/oauth/authorize'.format(web_url)
profile_url = '{0}/user'.format(api_url)
emails_url = '{0}/user/emails'.format(api_url)
def complete_login(self, request, app, token, **kwargs):
params = {'access_token': token.token}
resp = requests.get(self.profile_url, params=params)
extra_data = resp.json()
if app_settings.QUERY_EMAIL and not extra_data.get('email'):
extra_data['email'] = self.get_email(token)
return self.get_provider().sociallogin_from_response(
request, extra_data
)
def get_email(self, token):
email = None
params = {'access_token': token.token}
resp = requests.get(self.emails_url, params=params)
emails = resp.json()
if resp.status_code == 200 and emails:
email = emails[0]
primary_emails = [
e for e in emails
if not isinstance(e, dict) or e.get('primary')
]
if primary_emails:
email = primary_emails[0]
if isinstance(email, dict):
email = email.get('email', '')
return email
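# Illustrative Django settings for a GitHub Enterprise deployment (the key
# layout is assumed to follow the provider id used by the adapter above):
#
#     SOCIALACCOUNT_PROVIDERS = {
#         'github': {
#             'GITHUB_URL': 'https://github.example.com',
#         },
#     }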
oauth2_login = OAuth2LoginView.adapter_view(GitHubOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(GitHubOAuth2Adapter)
| mit | -8,545,775,042,350,054,000 | 34.508772 | 74 | 0.627964 | false |
CruiseDevice/coala | tests/core/DependencyBearTest.py | 4 | 8499 | from concurrent.futures import ThreadPoolExecutor
from coalib.core.DependencyBear import DependencyBear
from coalib.core.FileBear import FileBear
from coalib.core.ProjectBear import ProjectBear
from coalib.settings.Section import Section
from tests.core.CoreTestBase import CoreTestBase
class TestProjectBear(ProjectBear):
def analyze(self, files):
yield ', '.join('{}({})'.format(filename, len(files[filename]))
for filename in sorted(files))
class TestFileBear(FileBear):
def analyze(self, filename, file):
yield '{}:{}'.format(filename, len(file))
class TestBearDependentOnProjectBear(DependencyBear):
BEAR_DEPS = {TestProjectBear}
def analyze(self, dependency_bear, dependency_result):
yield '{} - {}'.format(dependency_bear.name, dependency_result)
class TestBearDependentOnFileBear(DependencyBear):
BEAR_DEPS = {TestFileBear}
def analyze(self, dependency_bear, dependency_result):
yield '{} - {}'.format(dependency_bear.name, dependency_result)
class TestBearDependentOnMultipleBears(DependencyBear):
BEAR_DEPS = {TestFileBear, TestProjectBear}
def analyze(self, dependency_bear, dependency_result, a_number=100):
yield '{} ({}) - {}'.format(
dependency_bear.name, a_number, dependency_result)
class DependencyBearTest(CoreTestBase):
def assertResultsEqual(self, bear_type, expected,
section=None, file_dict=None):
"""
Asserts whether the expected results do match the output of the bear.
Asserts for the results out-of-order.
:param bear_type:
The bear class to check.
:param expected:
A sequence of expected results.
:param section:
A section for the bear to use. By default uses a new section with
name ``test-section``.
:param file_dict:
A file-dictionary for the bear to use. By default uses an empty
dictionary.
"""
if section is None:
section = Section('test-section')
if file_dict is None:
file_dict = {}
uut = bear_type(section, file_dict)
results = self.execute_run({uut})
self.assertEqual(sorted(expected), sorted(results))
def test_projectbear_dependency(self):
        # Dependency results are also caught in the result callback, thus they
# are included in the final result list.
self.assertResultsEqual(
TestBearDependentOnProjectBear,
file_dict={},
expected=['',
'TestProjectBear - '])
self.assertResultsEqual(
TestBearDependentOnProjectBear,
file_dict={'fileX': []},
expected=['fileX(0)',
'TestProjectBear - fileX(0)'])
self.assertResultsEqual(
TestBearDependentOnProjectBear,
file_dict={'fileX': [], 'fileY': ['hello']},
expected=['fileX(0), fileY(1)',
'TestProjectBear - fileX(0), fileY(1)'])
self.assertResultsEqual(
TestBearDependentOnProjectBear,
file_dict={'fileX': [], 'fileY': ['hello'], 'fileZ': ['x\n', 'y']},
expected=['fileX(0), fileY(1), fileZ(2)',
'TestProjectBear - fileX(0), fileY(1), fileZ(2)'])
def test_filebear_dependency(self):
        # Dependency results are also caught in the result callback, thus they
# are included in the final result list.
self.assertResultsEqual(
TestBearDependentOnFileBear,
file_dict={},
expected=[])
self.assertResultsEqual(
TestBearDependentOnFileBear,
file_dict={'fileX': []},
expected=['fileX:0',
'TestFileBear - fileX:0'])
self.assertResultsEqual(
TestBearDependentOnFileBear,
file_dict={'fileX': [], 'fileY': ['hello']},
expected=['fileX:0',
'fileY:1',
'TestFileBear - fileX:0',
'TestFileBear - fileY:1'])
self.assertResultsEqual(
TestBearDependentOnFileBear,
file_dict={'fileX': [], 'fileY': ['hello'], 'fileZ': ['x\n', 'y']},
expected=['fileX:0',
'fileY:1',
'fileZ:2',
'TestFileBear - fileX:0',
'TestFileBear - fileY:1',
'TestFileBear - fileZ:2'])
def test_multiple_bears_dependencies(self):
        # Dependency results are also caught in the result callback, thus they
# are included in the final result list.
self.assertResultsEqual(
TestBearDependentOnMultipleBears,
file_dict={},
expected=['',
'TestProjectBear (100) - '])
self.assertResultsEqual(
TestBearDependentOnMultipleBears,
file_dict={'fileX': []},
expected=['fileX(0)',
'TestProjectBear (100) - fileX(0)',
'fileX:0',
'TestFileBear (100) - fileX:0'])
self.assertResultsEqual(
TestBearDependentOnMultipleBears,
file_dict={'fileX': [], 'fileY': ['hello']},
expected=['fileX(0), fileY(1)',
'TestProjectBear (100) - fileX(0), fileY(1)',
'fileX:0',
'fileY:1',
'TestFileBear (100) - fileX:0',
'TestFileBear (100) - fileY:1'])
self.assertResultsEqual(
TestBearDependentOnMultipleBears,
file_dict={'fileX': [], 'fileY': ['hello'], 'fileZ': ['x\n', 'y']},
expected=['fileX(0), fileY(1), fileZ(2)',
'TestProjectBear (100) - fileX(0), fileY(1), fileZ(2)',
'fileX:0',
'fileY:1',
'fileZ:2',
'TestFileBear (100) - fileX:0',
'TestFileBear (100) - fileY:1',
'TestFileBear (100) - fileZ:2'])
def test_multiple_bears_dependencies_with_parameter(self):
section = Section('test-section')
section['a_number'] = '500'
        # Dependency results are also caught in the result callback, thus they
# are included in the final result list.
self.assertResultsEqual(
TestBearDependentOnMultipleBears,
section=section,
file_dict={},
expected=['',
'TestProjectBear (500) - '])
self.assertResultsEqual(
TestBearDependentOnMultipleBears,
section=section,
file_dict={'fileX': []},
expected=['fileX(0)',
'TestProjectBear (500) - fileX(0)',
'fileX:0',
'TestFileBear (500) - fileX:0'])
self.assertResultsEqual(
TestBearDependentOnMultipleBears,
section=section,
file_dict={'fileX': [], 'fileY': ['hello']},
expected=['fileX(0), fileY(1)',
'TestProjectBear (500) - fileX(0), fileY(1)',
'fileX:0',
'fileY:1',
'TestFileBear (500) - fileX:0',
'TestFileBear (500) - fileY:1'])
self.assertResultsEqual(
TestBearDependentOnMultipleBears,
section=section,
file_dict={'fileX': [], 'fileY': ['hello'], 'fileZ': ['x\n', 'y']},
expected=['fileX(0), fileY(1), fileZ(2)',
'TestProjectBear (500) - fileX(0), fileY(1), fileZ(2)',
'fileX:0',
'fileY:1',
'fileZ:2',
'TestFileBear (500) - fileX:0',
'TestFileBear (500) - fileY:1',
'TestFileBear (500) - fileZ:2'])
# Execute the same tests from DependencyBearTest, but use a ThreadPoolExecutor
# instead. It shall also seamlessly work with Python threads. Also there are
# coverage issues on Windows with ProcessPoolExecutor as coverage data isn't
# passed properly back from the pool processes.
class DependencyBearOnThreadPoolExecutorTest(DependencyBearTest):
def setUp(self):
super().setUp()
self.executor = ThreadPoolExecutor, tuple(), dict(max_workers=8)
| agpl-3.0 | -4,377,442,469,723,759,000 | 38.165899 | 79 | 0.544535 | false |
Ircam-Web/mezzanine-organization | organization/network/migrations/0138_auto_20190619_2047.py | 1 | 1107 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-06-19 18:47
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0137_auto_20190619_2021'),
]
operations = [
migrations.AlterModelOptions(
name='organization',
options={'ordering': ['name'], 'permissions': (('user_edit', 'Mezzo - User can edit its own content'), ('user_delete', 'Mezzo - User can delete its own content'), ('team_edit', "Mezzo - User can edit his team's content"), ('team_delete', "Mezzo - User can delete his team's content")), 'verbose_name': 'organization'},
),
migrations.AlterField(
model_name='organization',
name='user',
field=models.ForeignKey(default=4, on_delete=django.db.models.deletion.CASCADE, related_name='organizations', to=settings.AUTH_USER_MODEL, verbose_name='Author'),
preserve_default=False,
),
]
| agpl-3.0 | 4,209,029,235,910,933,000 | 40 | 330 | 0.64589 | false |
ThiefMaster/indico | indico/modules/events/tracks/forms.py | 3 | 2022 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from wtforms.fields import StringField
from wtforms.validators import DataRequired
from indico.core.db.sqlalchemy.descriptions import RenderMode
from indico.modules.events.sessions.models.sessions import Session
from indico.modules.events.tracks.models.groups import TrackGroup
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm, generated_data
from indico.web.forms.fields import IndicoMarkdownField
class TrackForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()])
code = StringField(_('Code'))
track_group = QuerySelectField(_('Track group'), default='', allow_blank=True, get_label='title',
description=_('Select a track group to which this track should belong'))
default_session = QuerySelectField(_('Default session'), default='', allow_blank=True, get_label='title',
description=_('Indico will preselect this session whenever an abstract is '
'accepted for the track'))
description = IndicoMarkdownField(_('Description'), editor=True)
def __init__(self, *args, **kwargs):
event = kwargs.pop('event')
super().__init__(*args, **kwargs)
self.default_session.query = Session.query.with_parent(event)
self.track_group.query = TrackGroup.query.with_parent(event)
class ProgramForm(IndicoForm):
program = IndicoMarkdownField(_('Programme'), editor=True, mathjax=True)
@generated_data
def program_render_mode(self):
return RenderMode.markdown
class TrackGroupForm(IndicoForm):
title = StringField(_('Title'), [DataRequired()])
description = IndicoMarkdownField(_('Description'), editor=True)
| mit | -7,091,289,734,348,802,000 | 42.021277 | 114 | 0.691889 | false |
code-google-com/cortex-vfx | python/IECoreMaya/TemporaryAttributeValues.py | 12 | 4259 | ##########################################################################
#
# Copyright (c) 2009-2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import maya.cmds
import maya.OpenMaya
import IECore
import StringUtil
## A context manager for controlling attribute values in with statements. It
# sets attributes to requested values on entering the block and resets them to
# their previous values on exiting the block.
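#
# A minimal illustrative use (the node and attribute names are hypothetical):
#
#	with IECoreMaya.TemporaryAttributeValues( { "pSphere1.visibility" : False } ) :
#		pass # the attribute holds the temporary value inside this block
#	# the previous value has been restored at this point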
class TemporaryAttributeValues :
def __init__( self, attributeAndValues = {}, **kw ) :
self.__attributesAndValues = attributeAndValues
self.__attributesAndValues.update( kw )
def __enter__( self ) :
handlers = {
"enum" : self.__simpleAttrHandler,
"bool" : self.__simpleAttrHandler,
"float" : self.__simpleAttrHandler,
"long" : self.__simpleAttrHandler,
"short" : self.__simpleAttrHandler,
"float2" : IECore.curry( self.__numeric2AttrHandler, attributeType="float2" ),
"long2" : IECore.curry( self.__numeric2AttrHandler, attributeType="long2" ),
"short2" : IECore.curry( self.__numeric2AttrHandler, attributeType="short2" ),
"float3" : IECore.curry( self.__numeric3AttrHandler, attributeType="float3" ),
"long3" : IECore.curry( self.__numeric3AttrHandler, attributeType="long3" ),
"short3" : IECore.curry( self.__numeric3AttrHandler, attributeType="short3" ),
"string" : self.__stringAttrHandler,
}
self.__restoreCommands = []
for attr, value in self.__attributesAndValues.items() :
# check we can handle this type
attrType = maya.cmds.getAttr( attr, type=True )
handler = handlers.get( attrType, None )
if not handler :
raise TypeError( "Attribute \"%s\" has unsupported type \"%s\"." % ( attr, attrType ) )
# store a command to restore the attribute value later
origValue = maya.cmds.getAttr( attr )
if isinstance( origValue, list ) and isinstance( origValue[0], tuple ) :
origValue = origValue[0]
self.__restoreCommands.append( IECore.curry( handler, attr, origValue ) )
# and change the attribute value
handler( attr, value )
def __exit__( self, type, value, traceBack ) :
for cmd in self.__restoreCommands :
cmd()
def __simpleAttrHandler( self, attr, value ) :
maya.cmds.setAttr( attr, value )
def __numeric2AttrHandler( self, attr, value, attributeType ) :
maya.cmds.setAttr( attr, value[0], value[1], type=attributeType )
def __numeric3AttrHandler( self, attr, value, attributeType ) :
maya.cmds.setAttr( attr, value[0], value[1], value[2], type=attributeType )
def __stringAttrHandler( self, attr, value ) :
maya.cmds.setAttr( attr, value, type="string" )
| bsd-3-clause | -7,749,179,435,043,762,000 | 38.435185 | 91 | 0.694294 | false |
JustinDrake/openbazaar-go | qa/smtp_notification.py | 1 | 5877 | import requests
import json
import time
import subprocess
import re
import os
from collections import OrderedDict
from test_framework.test_framework import OpenBazaarTestFramework, TestFailure
from test_framework.smtp_server import SMTP_DUMPFILE
class SMTPTest(OpenBazaarTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 2
def run_test(self):
alice = self.nodes[0]
bob = self.nodes[1]
# configure SMTP notifications
time.sleep(4)
api_url = alice["gateway_url"] + "ob/settings"
smtp = {
"smtpSettings" : {
"notifications": True,
"serverAddress": "0.0.0.0:1025",
"username": "usr",
"password": "passwd",
"senderEmail": "openbazaar@test.org",
"recipientEmail": "user.openbazaar@test.org"
}
}
r = requests.post(api_url, data=json.dumps(smtp, indent=4))
if r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Settings POST endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("SMTPTest - FAIL: Settings POST failed. Reason: %s", resp["reason"])
time.sleep(4)
# check SMTP settings
api_url = alice["gateway_url"] + "ob/settings"
r = requests.get(api_url)
if r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Settings GET endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("SMTPTest - FAIL: Settings GET failed. Reason: %s", resp["reason"])
# check notifications
addr = "0.0.0.0:1025"
class_name = "test_framework.smtp_server.SMTPTestServer"
proc = subprocess.Popen(["python", "-m", "smtpd", "-n", "-c", class_name, addr])
# generate some coins and send them to bob
time.sleep(4)
api_url = bob["gateway_url"] + "wallet/address"
r = requests.get(api_url)
if r.status_code == 200:
resp = json.loads(r.text)
address = resp["address"]
elif r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Address endpoint not found")
else:
raise TestFailure("SMTPTest - FAIL: Unknown response")
self.send_bitcoin_cmd("sendtoaddress", address, 10)
time.sleep(20)
# post listing to alice
with open('testdata/listing.json') as listing_file:
listing_json = json.load(listing_file, object_pairs_hook=OrderedDict)
api_url = alice["gateway_url"] + "ob/listing"
r = requests.post(api_url, data=json.dumps(listing_json, indent=4))
if r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Listing post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("SMTPTest - FAIL: Listing POST failed. Reason: %s", resp["reason"])
time.sleep(4)
# get listing hash
api_url = alice["gateway_url"] + "ipns/" + alice["peerId"] + "/listings/index.json"
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("SMTPTest - FAIL: Couldn't get listing index")
resp = json.loads(r.text)
listingId = resp[0]["hash"]
# bob send order
with open('testdata/order_direct.json') as order_file:
order_json = json.load(order_file, object_pairs_hook=OrderedDict)
order_json["items"][0]["listingHash"] = listingId
api_url = bob["gateway_url"] + "ob/purchase"
r = requests.post(api_url, data=json.dumps(order_json, indent=4))
if r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Purchase post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("SMTPTest - FAIL: Purchase POST failed. Reason: %s", resp["reason"])
resp = json.loads(r.text)
orderId = resp["orderId"]
payment_address = resp["paymentAddress"]
payment_amount = resp["amount"]
# fund order
spend = {
"address": payment_address,
"amount": payment_amount,
"feeLevel": "NORMAL"
}
api_url = bob["gateway_url"] + "wallet/spend"
r = requests.post(api_url, data=json.dumps(spend, indent=4))
if r.status_code == 404:
raise TestFailure("SMTPTest - FAIL: Spend post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("SMTPTest - FAIL: Spend POST failed. Reason: %s", resp["reason"])
time.sleep(20)
proc.terminate()
# check notification
expected = '''From: openbazaar@test.org
To: user.openbazaar@test.org
MIME-Version: 1.0
Content-Type: text/html; charset=UTF-8
Subject: [OpenBazaar] Order received
You received an order "Ron Swanson Tshirt".
Order ID: QmNiPgKNq27qQE8fRxMbtDfRcFDEYMH5wDRgdqtqoWBpGg
Buyer: QmS5svqgGwFxwY9W5nXBUh1GJ7x8tqpkYfD4kB3MG7mPRv
Thumbnail: QmNedYJ6WmLhacAL2ozxb4k33Gxd9wmKB7HyoxZCwXid1e
Timestamp: 1487699826
'''
expected_lines = [e for e in expected.splitlines() if not e.startswith('Timestamp:') and not e.startswith('Order ID:')]
with open(SMTP_DUMPFILE, 'r') as f:
res_lines = [l.strip() for l in f.readlines() if not l.startswith('Timestamp') and not l.startswith('Order ID:')]
if res_lines != expected_lines:
raise TestFailure("SMTPTest - FAIL: Incorrect mail data received")
os.remove(SMTP_DUMPFILE)
print("SMTPTest - PASS")
if __name__ == '__main__':
print("Running SMTPTest")
SMTPTest().main(["--regtest", "--disableexchangerates"])
| mit | 1,382,961,265,109,162,200 | 38.18 | 127 | 0.599456 | false |
laurent-george/protolab_sound_recognition | tests/test_bell_detection.py | 1 | 1178 | __author__ = 'lgeorge'
import os
import glob
import subprocess
from sound_classification import classification_service
from test_common import _get_training_data, wget_file
import pytest
def test_classifier_simple():
"""
just check that the service is correctly installed
:return:
"""
sound_classification_obj = classification_service.SoundClassification()
assert(True)
@pytest.mark.parametrize("enable_calibration_of_score", [(False), (True)])
def test_bell_detection(enable_calibration_of_score):
dataset_path = _get_training_data()
file_regexp = os.path.join(dataset_path, '*.wav')
files = glob.glob(file_regexp)
sound_classification_obj = classification_service.SoundClassification(wav_file_list=files, calibrate_score=enable_calibration_of_score)
sound_classification_obj.learn()
test_file_url = "https://www.dropbox.com/s/8dlr28s9gby46h1/bell_test.wav?dl=0"
test_file = wget_file(test_file_url)
test_file = os.path.abspath(test_file)
res = sound_classification_obj.processed_wav(test_file)
## TODO assert deskbell is better than doorbell
assert('DeskBell' in set([x.class_predicted for x in res]))
| mit | -5,147,432,217,201,743,000 | 34.69697 | 139 | 0.7309 | false |
radosuav/QGIS | python/plugins/processing/algs/gdal/tri.py | 15 | 4495 | # -*- coding: utf-8 -*-
"""
***************************************************************************
tri.py
---------------------
Date : October 2013
Copyright : (C) 2013 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'October 2013'
__copyright__ = '(C) 2013, Alexander Bruy'
import os
from qgis.core import (QgsProcessingException,
QgsProcessingParameterDefinition,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterBand,
QgsProcessingParameterString,
QgsProcessingParameterBoolean,
QgsProcessingParameterRasterDestination)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class tri(GdalAlgorithm):
INPUT = 'INPUT'
BAND = 'BAND'
COMPUTE_EDGES = 'COMPUTE_EDGES'
OPTIONS = 'OPTIONS'
OUTPUT = 'OUTPUT'
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterRasterLayer(self.INPUT, self.tr('Input layer')))
self.addParameter(QgsProcessingParameterBand(self.BAND,
self.tr('Band number'),
1,
parentLayerParameterName=self.INPUT))
self.addParameter(QgsProcessingParameterBoolean(self.COMPUTE_EDGES,
self.tr('Compute edges'),
defaultValue=False))
options_param = QgsProcessingParameterString(self.OPTIONS,
self.tr('Additional creation options'),
defaultValue='',
optional=True)
options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
options_param.setMetadata({
'widget_wrapper': {
'class': 'processing.algs.gdal.ui.RasterOptionsWidget.RasterOptionsWidgetWrapper'}})
self.addParameter(options_param)
self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT, self.tr('Terrain Ruggedness Index')))
def name(self):
return 'triterrainruggednessindex'
def displayName(self):
return self.tr('Terrain Ruggedness Index (TRI)')
def group(self):
return self.tr('Raster analysis')
def groupId(self):
return 'rasteranalysis'
def commandName(self):
return 'gdaldem'
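    # For illustration only (file names are hypothetical): with the default
    # options getConsoleCommands() below assembles a command along the lines of
    #   gdaldem TRI dem.tif tri_output.tif -b 1
    # appending '-compute_edges' and any creation options when they are set.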
def getConsoleCommands(self, parameters, context, feedback, executing=True):
arguments = ['TRI']
inLayer = self.parameterAsRasterLayer(parameters, self.INPUT, context)
if inLayer is None:
raise QgsProcessingException(self.invalidRasterError(parameters, self.INPUT))
arguments.append(inLayer.source())
out = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
self.setOutputValue(self.OUTPUT, out)
arguments.append(out)
arguments.append('-b')
arguments.append(str(self.parameterAsInt(parameters, self.BAND, context)))
if self.parameterAsBoolean(parameters, self.COMPUTE_EDGES, context):
arguments.append('-compute_edges')
options = self.parameterAsString(parameters, self.OPTIONS, context)
if options:
arguments.extend(GdalUtils.parseCreationOptions(options))
return [self.commandName(), GdalUtils.escapeAndJoin(arguments)]
| gpl-2.0 | 2,633,965,359,591,796,000 | 40.62037 | 116 | 0.541491 | false |
vasili-v/distcovery | test/test_test.py | 1 | 8160 | import unittest
import os
import sys
import StringIO
from distutils import log
from distutils.cmd import Command
from distutils.dist import Distribution
from utils import mock_directory_tree, PreserveOs, ImportTrash
# Reload module to run its global section under coverage supervision
import distcovery.test
reload(distcovery.test)
import distcovery.importer
reload(distcovery.importer)
from distcovery.exceptions import NoTestModulesException, \
UnknownModulesException
from distcovery.path import Package
from distcovery.test import Test
class TestTest(ImportTrash, PreserveOs, unittest.TestCase):
def setUp(self):
super(TestTest, self).setUp()
self.__threshold = log.set_threshold(log.INFO)
self.__stdout = sys.stdout
self.stdout = StringIO.StringIO()
sys.stdout = self.stdout
self.__unittest_main = unittest.main
def tearDown(self):
unittest.main = self.__unittest_main
sys.stdout = self.__stdout
log.set_threshold(self.__threshold)
super(TestTest, self).tearDown()
def test_class_attributes(self):
self.assertTrue(issubclass(Test, Command))
self.assertTrue(hasattr(Test, 'description'))
self.assertTrue(hasattr(Test, 'user_options'))
self.assertTrue(hasattr(Test, 'boolean_options'))
def test_creation(self):
test = Test(Distribution())
self.assertTrue(isinstance(test, Test))
self.assertEqual(test.module, None)
self.assertEqual(test.coverage_base, None)
self.assertEqual(test.no_coverage, None)
self.assertEqual(test.test_root, 'test')
def test_finalize_options(self):
test = Test(Distribution())
test.distribution.get_command_obj('install').install_purelib = 'test'
test.finalize_options()
self.assertEqual(test.coverage_base, 'test')
def test_collect_tests_empty(self):
tree = {('.',): tuple()}
os.listdir, os.path.isfile, os.path.isdir = mock_directory_tree(tree)
test = Test(Distribution())
test.test_root = '.'
with self.assertRaises(NoTestModulesException) as ctx:
test.collect_tests()
self.assertTrue(hasattr(test, 'test_package'))
self.assertTrue(isinstance(test.test_package, Package))
self.assertEqual(test.test_package.modules, [])
self.assertEqual(test.test_package.packages, [])
self.assertEqual(test.test_package.content, {})
self.assertEqual(ctx.exception.message,
NoTestModulesException.template % \
{'path': test.test_root})
def test_collect_tests(self):
self.full_test_tree()
test = Test(Distribution())
test.test_root = '.'
test.collect_tests()
self.assertTrue(hasattr(test, 'test_package'))
self.assertTrue(isinstance(test.test_package, Package))
content = {}
for alias, importable in test.test_package.content.iteritems():
content[alias] = importable.str_name()
self.assertEqual(content, self.expected_content)
def test_register_importer(self):
self.full_test_tree()
test = Test(Distribution())
test.test_root = '.'
test.collect_tests()
test.register_importer()
self.assertTrue(hasattr(test, 'importer'))
self.meta_path_trash.append(test.importer)
self.assertIn(test.importer, sys.meta_path)
def test_print_test_package(self):
self.full_test_tree()
test = Test(Distribution())
test.test_root = '.'
test.collect_tests()
test.print_test_package()
self.assertEqual(self.stdout.getvalue(),
'Test suites:\n' \
'\tfirst\n' \
'\tsecond\n' \
'\tsub_first:\n' \
'\t\tsub_first.sub_first\n' \
'\tsub_third:\n' \
'\t\tsub_third.sub_first\n' \
'\t\tsub_third.sub_second:\n' \
'\t\t\tsub_third.sub_second.sub_first\n')
def test_validate_modules_unknown_modules(self):
self.full_test_tree()
test = Test(Distribution())
test.test_root = '.'
test.collect_tests()
test.register_importer()
self.meta_path_trash.append(test.importer)
modules = ['first_unknown', 'third_unknown', 'fourth_unknown']
with self.assertRaises(UnknownModulesException) as ctx:
test.validate_modules(modules + ['second', 'first'])
modules, suffix = UnknownModulesException.stringify_list(modules)
self.assertEqual(ctx.exception.message,
UnknownModulesException.template % \
{'modules': modules, 'suffix': suffix})
def test_validate_modules(self):
self.full_test_tree()
test = Test(Distribution())
test.test_root = '.'
test.collect_tests()
test.register_importer()
self.meta_path_trash.append(test.importer)
test.validate_modules(['second', 'first', 'sub_first'])
def test_map_module(self):
self.full_test_tree()
test = Test(Distribution())
test.test_root = '.'
test.collect_tests()
test.register_importer()
self.meta_path_trash.append(test.importer)
self.assertRegexpMatches(test.map_module(None), '^X_\\d+$')
self.assertRegexpMatches(test.map_module('sub_first'), '^X_\\d+$')
self.assertEqual(test.map_module('second'), 'test_second')
self.assertEqual(test.map_module('sub_third.sub_second.sub_first'),
'test_sub_third.test_sub_second.test_sub_first')
def test_run_print_test_package(self):
self.full_test_tree()
test = Test(Distribution())
test.test_root = '.'
test.dry_run = True
test.run()
self.assertEqual(self.stdout.getvalue(),
'Test suites:\n' \
'\tfirst\n' \
'\tsecond\n' \
'\tsub_first:\n' \
'\t\tsub_first.sub_first\n' \
'\tsub_third:\n' \
'\t\tsub_third.sub_first\n' \
'\t\tsub_third.sub_second:\n' \
'\t\t\tsub_third.sub_second.sub_first\n')
def test_run(self):
self.full_test_tree()
arguments = []
def main(*args, **kwargs):
arguments.append((args, kwargs))
unittest.main = main
test = Test(Distribution())
test.test_root = '.'
test.module = 'first'
test.no_coverage = True
test.run()
self.assertEqual(arguments,
[(('test_first',),
{'argv': sys.argv[:1],
'exit': False,
'verbosity': 1})])
def test_run_default(self):
self.full_test_tree()
arguments = []
def main(*args, **kwargs):
arguments.append((args, kwargs))
unittest.main = main
test = Test(Distribution())
test.test_root = '.'
test.module = None
test.no_coverage = True
test.run()
self.assertEqual(len(arguments), 1, 'Expected 1 set of arguments, ' \
'got %s' % repr(arguments))
arguments = arguments[0]
self.assertEqual(len(arguments), 2, 'Expected tuple with 2 items, ' \
'got %s' % repr(arguments))
args, kwargs = arguments
self.assertEqual(len(args), 1, 'Expected tuple with 1 item, got %s' % \
repr(args))
self.assertRegexpMatches(args[0], '^X_\\d+$')
self.assertEqual(kwargs, {'argv': sys.argv[:1],
'exit': False,
'verbosity': 1})
if __name__ == '__main__':
unittest.main()
| mit | 4,287,282,404,313,422,000 | 32.719008 | 79 | 0.557353 | false |
ds-hwang/chromium-crosswalk | tools/perf/benchmarks/indexeddb_perf.py | 5 | 3978 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs Chromium's IndexedDB performance test. These test:
Databases:
create/delete
Keys:
create/delete
Indexes:
create/delete
Data access:
Random read/write
Read cache
Cursors:
Read & random writes
Walking multiple
Seeking.
"""
import json
import os
from core import path_util
from core import perf_benchmark
from telemetry import page as page_module
from telemetry import story
from telemetry.page import page_test
from telemetry.value import scalar
from metrics import memory
from metrics import power
import page_sets
from telemetry.timeline import tracing_category_filter
from telemetry.web_perf import timeline_based_measurement
IDB_CATEGORY = 'IndexedDB'
TIMELINE_REQUIRED_CATEGORY = 'blink.console'
class _IndexedDbMeasurement(page_test.PageTest):
def __init__(self):
super(_IndexedDbMeasurement, self).__init__()
self._memory_metric = None
self._power_metric = None
def WillStartBrowser(self, platform):
"""Initialize metrics once right before the browser has been launched."""
self._power_metric = power.PowerMetric(platform)
def DidStartBrowser(self, browser):
"""Initialize metrics once right after the browser has been launched."""
self._memory_metric = memory.MemoryMetric(browser)
def DidNavigateToPage(self, page, tab):
self._memory_metric.Start(page, tab)
self._power_metric.Start(page, tab)
def ValidateAndMeasurePage(self, page, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
tab.WaitForJavaScriptExpression('window.done', 600)
self._power_metric.Stop(page, tab)
self._memory_metric.Stop(page, tab)
self._memory_metric.AddResults(tab, results)
self._power_metric.AddResults(tab, results)
js_get_results = 'JSON.stringify(automation.getResults());'
result_dict = json.loads(tab.EvaluateJavaScript(js_get_results))
total = 0.0
for key in result_dict:
if key == 'OverallTestDuration':
continue
msec = float(result_dict[key])
results.AddValue(scalar.ScalarValue(
results.current_page, key, 'ms', msec, important=False))
total += msec
results.AddValue(scalar.ScalarValue(
results.current_page, 'Total Perf', 'ms', total))
def CustomizeBrowserOptions(self, options):
memory.MemoryMetric.CustomizeBrowserOptions(options)
power.PowerMetric.CustomizeBrowserOptions(options)
class IndexedDbOriginal(perf_benchmark.PerfBenchmark):
"""Chromium's IndexedDB Performance tests."""
test = _IndexedDbMeasurement
@classmethod
def Name(cls):
return 'indexeddb_perf'
def CreateStorySet(self, options):
indexeddb_dir = os.path.join(path_util.GetChromiumSrcDir(), 'chrome',
'test', 'data', 'indexeddb')
ps = story.StorySet(base_dir=indexeddb_dir)
ps.AddStory(page_module.Page('file://perf_test.html', ps, ps.base_dir))
return ps
class IndexedDbOriginalSectioned(perf_benchmark.PerfBenchmark):
"""Chromium's IndexedDB Performance tests."""
test = _IndexedDbMeasurement
page_set = page_sets.IndexedDBEndurePageSet
@classmethod
def Name(cls):
return 'storage.indexeddb_endure'
class IndexedDbTracing(perf_benchmark.PerfBenchmark):
"""IndexedDB Performance tests that use tracing."""
page_set = page_sets.IndexedDBEndurePageSet
def CreateTimelineBasedMeasurementOptions(self):
cat_filter = tracing_category_filter.CreateMinimalOverheadFilter()
cat_filter.AddIncludedCategory(IDB_CATEGORY)
cat_filter.AddIncludedCategory(TIMELINE_REQUIRED_CATEGORY)
return timeline_based_measurement.Options(
overhead_level=cat_filter)
@classmethod
def Name(cls):
return 'storage.indexeddb_endure_tracing'
@classmethod
def ValueCanBeAddedPredicate(cls, value, is_first_result):
return 'idb' in value.name
| bsd-3-clause | -3,815,484,576,776,627,700 | 27.826087 | 77 | 0.734289 | false |
fhartwig/adhocracy3.mercator | src/adhocracy_core/adhocracy_core/auditing/__init__.py | 2 | 4803 | """Log which user modifies resources in additional 'audit' database."""
import transaction
import substanced.util
from pyramid.traversal import resource_path
from pyramid.request import Request
from pyramid.response import Response
from BTrees.OOBTree import OOBTree
from datetime import datetime
from logging import getLogger
from adhocracy_core.utils import get_user
from adhocracy_core.sheets.principal import IUserBasic
from adhocracy_core.utils import get_sheet_field
from adhocracy_core.interfaces import IResource
from adhocracy_core.interfaces import ChangelogMetadata
from adhocracy_core.interfaces import VisibilityChange
from adhocracy_core.interfaces import AuditlogAction
from adhocracy_core.interfaces import AuditlogEntry
logger = getLogger(__name__)
class AuditLog(OOBTree):
"""An Auditlog composed of audit entries.
This is a dictionary (:class:`collections.abc.Mapping`) with key
:class:`datetime.datetime` and value
:class:`adhocracy_core.interfaces.AuditlogEntry`.
    The methods `items`, `keys`, and `values` have the additional kwargs
    `min` and `max` to allow range queries::
january = datetime(2015, 1, 1)
february = datetime(2015, 2, 1)
audit = get_auditlog(context)
audit.items(min=january, max=february)
...
"""
def add(self,
name: AuditlogAction,
resource_path: str,
user_name: str,
user_path: str) -> None:
""" Add an auditlog entry to the audit log."""
self[datetime.utcnow()] = AuditlogEntry(name,
resource_path,
user_name,
user_path)
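# The sketch below illustrates the intended use of AuditLog.add together with a
# BTree range query as described in the class docstring; the resource and user
# paths are hypothetical examples and the helper is not called anywhere.
def _auditlog_usage_sketch():
    """Minimal usage sketch with illustrative values only."""
    log = AuditLog()
    log.add(AuditlogAction.created, '/proposals/proposal_1', 'alice',
            '/principals/users/0000001')
    # Range query over entries recorded in January 2015:
    january = datetime(2015, 1, 1)
    february = datetime(2015, 2, 1)
    return list(log.items(min=january, max=february))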
def get_auditlog(context: IResource) -> AuditLog:
"""Return the auditlog."""
return substanced.util.get_auditlog(context)
def set_auditlog(context: IResource) -> None:
"""Set an auditlog for the context."""
conn = context._p_jar
try:
connection = conn.get_connection('audit')
except KeyError:
return
root = connection.root()
if 'auditlog' in root:
return
auditlog = AuditLog()
root['auditlog'] = auditlog
def log_auditevent(context: IResource,
name: AuditlogAction,
user_name: str,
user_path: str) -> None:
"""Add an auditlog entry for `context` to the audit database.
The audit database is created if missing. If the `zodbconn.uri.audit`
value is not specified in the config, auditing does not happen.
"""
auditlog = get_auditlog(context)
path = resource_path(context)
if auditlog is not None:
auditlog.add(name, path, user_name, user_path)
def audit_resources_changes_callback(request: Request,
response: Response) -> None:
"""Add auditlog entries to the auditlog when the resources are changed.
This is a :term:`response- callback` that run after a request has
finished. To store the audit entry it adds an additional transaction.
"""
registry = request.registry
changelog_metadata = registry.changelog.values()
user_name, user_path = _get_user_info(request)
for meta in changelog_metadata:
_log_change(request.context, user_name, user_path, meta)
def _get_user_info(request: Request) -> (str, str):
if not hasattr(request, 'authenticated_userid'):
return ('', '') # ease scripting without user and testing
user = get_user(request)
if user is None:
return ('', '')
else:
user_name = get_sheet_field(user, IUserBasic, 'name')
user_path = resource_path(user)
return (user_name, user_path)
def _log_change(context: IResource,
user_name: str,
user_path: str,
change: ChangelogMetadata) -> None:
data_changed = change.created or change.modified
visibility_changed = change.visibility in [VisibilityChange.concealed,
VisibilityChange.revealed]
if data_changed or visibility_changed:
        action_name = _get_entry_name(change)
log_auditevent(context,
action_name,
user_name=user_name,
user_path=user_path)
transaction.commit()
def _get_entry_name(change) -> str:
if change.created:
return AuditlogAction.created
elif change.modified:
return AuditlogAction.modified
elif change.visibility == VisibilityChange.concealed:
return AuditlogAction.concealed
elif change.visibility == VisibilityChange.revealed:
return AuditlogAction.revealed
else:
raise ValueError('Invalid change state', change)
| agpl-3.0 | -945,154,538,102,320,400 | 33.553957 | 75 | 0.639808 | false |
camradal/ansible | test/units/modules/network/ios/test_ios_vrf.py | 13 | 6440 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleModuleExit
from ansible.modules.network.ios import ios_vrf
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
def set_module_args(args):
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args)
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except:
pass
fixture_data[path] = data
return data
class TestIosVrfModule(unittest.TestCase):
def setUp(self):
self.mock_get_config = patch('ansible.modules.network.ios.ios_vrf.get_config')
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch('ansible.modules.network.ios.ios_vrf.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
self.mock_get_config.stop()
self.mock_load_config.stop()
def execute_module(self, failed=False, changed=False, commands=None, sort=True):
self.get_config.return_value = load_fixture('ios_vrf_config.cfg')
self.load_config.return_value = None
with self.assertRaises(AnsibleModuleExit) as exc:
ios_vrf.main()
result = exc.exception.result
if failed:
self.assertTrue(result['failed'], result)
else:
self.assertEqual(result.get('changed'), changed, result)
if commands:
if sort:
self.assertEqual(sorted(commands), sorted(result['commands']))
else:
self.assertEqual(commands, result['commands'], result['commands'])
return result
def test_ios_vrf_name(self):
set_module_args(dict(name='test_4'))
commands = ['vrf definition test_4']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ios_vrf_name_unchanged(self):
set_module_args(dict(name='test_1', rd='1:100', description='test vrf 1'))
self.execute_module()
def test_ios_vrf_description(self):
set_module_args(dict(name='test_1', description='test string'))
commands = ['vrf definition test_1', 'description test string']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ios_vrf_rd(self):
set_module_args(dict(name='test_1', rd='2:100'))
commands = ['vrf definition test_1', 'rd 2:100']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ios_vrf_interfaces(self):
set_module_args(dict(name='test_1', interfaces=['Ethernet1']))
commands = ['interface Ethernet2', 'no vrf forwarding test_1',
'interface Ethernet1', 'vrf forwarding test_1',
'ip address 1.2.3.4/5']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ios_vrf_state_absent(self):
set_module_args(dict(name='test_1', state='absent'))
commands = ['no vrf definition test_1']
self.execute_module(changed=True, commands=commands)
def test_ios_vrf_purge_all(self):
set_module_args(dict(purge=True))
commands = ['no vrf definition test_1', 'no vrf definition test_2',
'no vrf definition test_3']
self.execute_module(changed=True, commands=commands)
def test_ios_vrf_purge_all_but_one(self):
set_module_args(dict(name='test_1', purge=True))
commands = ['no vrf definition test_2', 'no vrf definition test_3']
self.execute_module(changed=True, commands=commands)
def test_ios_vrfs_no_purge(self):
vrfs = [{'name': 'test_1'}, {'name': 'test_4'}]
set_module_args(dict(vrfs=vrfs))
commands = ['vrf definition test_4']
self.execute_module(changed=True, commands=commands)
def test_ios_vrfs_purge(self):
vrfs = [{'name': 'test_1'}, {'name': 'test_4'}]
set_module_args(dict(vrfs=vrfs, purge=True))
commands = ['no vrf definition test_2', 'no vrf definition test_3',
'vrf definition test_4']
self.execute_module(changed=True, commands=commands)
def test_ios_vrfs_global_arg(self):
vrfs = [{'name': 'test_1'}, {'name': 'test_2'}]
set_module_args(dict(vrfs=vrfs, description='test string'))
commands = ['vrf definition test_1', 'description test string',
'vrf definition test_2', 'description test string']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ios_vrfs_local_override_description(self):
vrfs = [{'name': 'test_1', 'description': 'test vrf 1'},
{'name': 'test_2'}]
set_module_args(dict(vrfs=vrfs, description='test string'))
commands = ['vrf definition test_2', 'description test string']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ios_vrfs_local_override_state(self):
vrfs = [{'name': 'test_1', 'state': 'absent'},
{'name': 'test_2'}]
set_module_args(dict(vrfs=vrfs, description='test string'))
commands = ['no vrf definition test_1', 'vrf definition test_2',
'description test string']
self.execute_module(changed=True, commands=commands, sort=False)
| gpl-3.0 | -3,982,276,343,558,475,000 | 36.44186 | 88 | 0.643323 | false |
olivierdalang/stdm | third_party/sqlalchemy/util/__init__.py | 1 | 2405 | # util/__init__.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .compat import callable, cmp, reduce, \
threading, py3k, py33, py2k, jython, pypy, cpython, win32, \
pickle, dottedgetter, parse_qsl, namedtuple, next, reraise, \
raise_from_cause, text_type, safe_kwarg, string_types, int_types, \
binary_type, nested, \
quote_plus, with_metaclass, print_, itertools_filterfalse, u, ue, b,\
unquote_plus, unquote, b64decode, b64encode, byte_buffer, itertools_filter,\
iterbytes, StringIO, inspect_getargspec, zip_longest
from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \
Properties, OrderedProperties, ImmutableProperties, OrderedDict, \
OrderedSet, IdentitySet, OrderedIdentitySet, column_set, \
column_dict, ordered_column_set, populate_column_dict, unique_list, \
UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \
to_column_set, update_copy, flatten_iterator, \
LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence, \
coerce_generator_arg
from .langhelpers import iterate_attributes, class_hierarchy, \
portable_instancemethod, unbound_method_to_callable, \
getargspec_init, format_argspec_init, format_argspec_plus, \
get_func_kwargs, get_cls_kwargs, decorator, as_interface, \
memoized_property, memoized_instancemethod, md5_hex, \
group_expirable_memoized_property, dependencies, decode_slice, \
monkeypatch_proxied_specials, asbool, bool_or_str, coerce_kw_type,\
duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\
classproperty, set_creation_order, warn_exception, warn, NoneType,\
constructor_copy, methods_equivalent, chop_traceback, asint,\
generic_repr, counter, PluginLoader, hybridmethod, safe_reraise,\
get_callable_argspec, only_once
from .deprecations import warn_deprecated, warn_pending_deprecation, \
deprecated, pending_deprecation, inject_docstring_text
# things that used to be not always available,
# but are now as of current support Python versions
from collections import defaultdict
from functools import partial
from functools import update_wrapper
from contextlib import contextmanager
| gpl-2.0 | 4,782,946,644,541,882,000 | 49.170213 | 80 | 0.739293 | false |
polymonster/pmtech | tools/pmbuild_ext/dependencies.py | 1 | 6375 | import os
import json
default_settings = dict()
default_settings["textures_dir"] = "assets/textures/"
default_settings["models_dir"] = "assets/mesh/"
def delete_orphaned_files(build_dir, platform_data_dir):
for root, dir, files in os.walk(build_dir):
for file in files:
dest_file = os.path.join(root, file)
if dest_file.find("dependencies.json") != -1:
depends_file = open(dest_file, "r")
depends_json = json.loads(depends_file.read())
depends_file.close()
for file_dependencies in depends_json["files"]:
for key in file_dependencies.keys():
for dependency_info in file_dependencies[key]:
if not os.path.exists(dependency_info["name"]):
del_path = os.path.join(platform_data_dir, key)
if os.path.exists(del_path):
os.remove(os.path.join(platform_data_dir, key))
print("deleting " + key + " source file no longer exists")
print(del_path)
break
def get_build_config_setting(dir_name):
if os.path.exists("build_config.json"):
build_config_file = open("build_config.json", "r")
build_config_json = json.loads(build_config_file.read())
build_config_file.close()
if dir_name in build_config_json:
return build_config_json[dir_name]
return default_settings[dir_name]
def export_config_merge(master, second):
for key in master.keys():
if key in second.keys():
master[key] = export_config_merge(master[key], second[key])
for key in second.keys():
if key not in master.keys():
master[key] = second[key]
return master
def get_export_config(filename):
export_info = dict()
rpath = filename.replace(os.getcwd(), "")
rpath = os.path.normpath(rpath)
sub_dirs = rpath.split(os.sep)
full_path = os.getcwd()
for dir in sub_dirs:
full_path = os.path.join(full_path, dir)
dir_export_file = os.path.join(full_path, "_export.json")
if os.path.exists(dir_export_file):
file = open(dir_export_file, "r")
file_json = file.read()
dir_info = json.loads(file_json)
export_info = export_config_merge(export_info, dir_info)
return export_info
def sanitize_filename(filename):
sanitized_name = filename.replace("@", ":")
sanitized_name = sanitized_name.replace('/', os.sep)
return sanitized_name
def create_info(file):
file = sanitize_filename(file)
file = os.path.normpath(os.path.join(os.getcwd(), file))
modified_time = os.path.getmtime(file)
return {"name": file, "timestamp": float(modified_time)}
def create_dependency_info(inputs, outputs, cmdline=""):
info = dict()
info["cmdline"] = cmdline
info["files"] = dict()
for o in outputs:
o = os.path.join(os.getcwd(), o)
info["files"][o] = []
for i in inputs:
if not os.path.exists(i):
continue
ii = create_info(i)
ii["data_file"] = o[o.find(os.sep + "data" + os.sep) + 1:]
info["files"][o].append(ii)
return info
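# Illustrative sketch of the dictionary built above; the input/output paths and
# command line are hypothetical and <cwd> stands for os.getcwd():
#   create_dependency_info(["assets/textures/a.png"], ["data/textures/a.dds"], "-fmt dds")
#   returns {"cmdline": "-fmt dds",
#            "files": {"<cwd>/data/textures/a.dds": [
#                {"name": "<cwd>/assets/textures/a.png",
#                 "timestamp": 1514764800.0,
#                 "data_file": "data/textures/a.dds"}]}}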
def check_up_to_date(dependencies, dest_file):
filename = os.path.join(dependencies["dir"], "dependencies.json")
if not os.path.exists(filename):
print("depends does not exist")
return False
file = open(filename)
d_str = file.read()
d_json = json.loads(d_str)
file_exists = False
for d in d_json["files"]:
for key in d.keys():
            dependency_file = sanitize_filename(key)
            if dest_file == dependency_file:
for i in d[key]:
file_exists = True
sanitized = sanitize_filename(i["name"])
if not os.path.exists(sanitized):
return False
if i["timestamp"] < os.path.getmtime(sanitized):
return False
if not file_exists:
return False
return True
def check_up_to_date_single(dest_file, deps):
dest_file = sanitize_filename(dest_file)
dep_filename = dest_file.replace(os.path.splitext(dest_file)[1], ".dep")
if not os.path.exists(dep_filename):
print(os.path.basename(dest_file) + ": deps does not exist.")
return False
dep_ts = os.path.getmtime(dest_file)
file = open(dep_filename)
d_str = file.read()
d_json = json.loads(d_str)
# check for changes to cmdline
if "cmdline" in deps:
if "cmdline" not in d_json.keys() or deps["cmdline"] != d_json["cmdline"]:
print(dest_file + " cmdline changed")
return False
# check for new additions
dep_files = []
for output in d_json["files"]:
for i in d_json["files"][output]:
dep_files.append(i["name"])
for output in deps["files"]:
for i in deps["files"][output]:
if i["name"] not in dep_files:
print(os.path.basename(dest_file) + ": has new inputs")
return False
# check for timestamps on existing
for d in d_json["files"]:
dest_file = sanitize_filename(d)
for input_file in d_json["files"][d]:
# output file does not exist yet
if not os.path.exists(dest_file):
print(os.path.basename(dest_file) + ": does not exist.")
return False
# output file is out of date
if os.path.getmtime(input_file["name"]) > dep_ts:
print(os.path.basename(dest_file) + ": is out of date.")
return False
print(os.path.basename(dest_file) + " up to date")
return True
def write_to_file(dependencies):
dir = dependencies["dir"]
directory_dependencies = os.path.join(dir, "dependencies.json")
try:
output_d = open(directory_dependencies, 'wb+')
output_d.write(bytes(json.dumps(dependencies, indent=4), 'UTF-8'))
output_d.close()
except:
return
def write_to_file_single(deps, file):
output_d = open(file, 'wb+')
output_d.write(bytes(json.dumps(deps, indent=4), 'UTF-8'))
output_d.close()
| mit | 4,657,659,342,399,912,000 | 35.428571 | 94 | 0.566431 | false |
trickvi/bdp2ckan | bdp2ckan.py | 1 | 7778 | # -*- coding: utf-8 -*-
# bdp2ckan.py - Send a budget data package to a CKAN instance
# Copyright (C) 2013 Tryggvi Björgvinsson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import click
import json
import jsonschema
import requests
import urlparse
import os.path
def create_ckan_package_dict(descriptor):
"""
Convert metadata from a data package to CKAN metadata
"""
# Mapping between metadata keys of CKAN and Data package
# This does not handle licenses (multiple licenses in the data package)
# because CKAN does not support it
ckan_mapping = [('name', 'name'), ('title', 'title'), ('url', 'homepage'),
('version', 'version'), ('license_id', 'license'),
('notes', 'description')]
# Extract available CKAN metadata from the data package
data_dict = {}
for (ckan, dpkg) in ckan_mapping:
if dpkg in descriptor:
data_dict[ckan] = descriptor[dpkg]
return data_dict
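# Illustrative sketch of the mapping above; the descriptor values are hypothetical:
#   create_ckan_package_dict({"name": "budget-2014", "title": "Budget 2014",
#                             "license": "odc-pddl", "description": "Approved budget"})
#   -> {"name": "budget-2014", "title": "Budget 2014",
#       "license_id": "odc-pddl", "notes": "Approved budget"}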
def create_ckan_resource_array(descriptor):
"""
Create a CKAN resource array from data package resources
"""
# Mapping between resource metadata keys of CKAN and Data package
# name and path can be overwritten by title and url respectively
resource_mapping = [('name', 'name'), ('name', 'title'),
('description', 'description'), ('format', 'format'),
('mimetype', 'mediatype'), ('size', 'bytes'),
('hash', 'hash'), ('url', 'path'), ('url', 'url')]
# Extract CKAN resources and associated metadata from data package
resource_array = {'resources': []}
for resource in descriptor['resources']:
data_dict = {}
for (ckan, dpkg) in resource_mapping:
if dpkg in resource:
data_dict[ckan] = resource[dpkg]
resource_array['resources'].append(data_dict)
return resource_array
def create_budget_data_package_extras(descriptor):
"""
Create a CKAN extras array from budget data package specific metadata
"""
# Mapping between metadata keys of CKAN extras and Budget data package
# This requires these particular keys to be set on the CKAN instance
# Mapping is excluded because that's just going to look really bad in CKAN
bdp_mapping = [('granularity', 'granularity'), ('direction', 'direction'),
('status', 'status'), ('country', 'countryCode')]
# Extract budget data package metadata as CKAN extras metadata
data_dict = {'extras':[]}
for (ckan, dpkg) in bdp_mapping:
if dpkg in descriptor:
data_dict['extras'].append({'key': dpkg, 'value': descriptor[dpkg]})
return data_dict
def submit_to_ckan(host, apikey, data):
"""
Submit a CKAN data dictionary to a given host with a given api key
"""
# Put together the api url and authorization headers and send the data
package_create_url = urlparse.urljoin(host, '/api/action/package_create')
headers = {'Authorization': apikey}
response = requests.post(package_create_url, headers=headers, json=data)
return (response.status_code, response.text)
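# Minimal usage sketch; the host, API key and dataset dictionary are hypothetical:
#   status, body = submit_to_ckan('http://ckan.example.org/', 'my-api-key',
#                                 {'name': 'budget-2014', 'title': 'Budget 2014'})
#   if status != 200:
#       ...  # callers are expected to treat non-200 responses as failures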
@click.command()
@click.option('--schema', default=None, nargs=1, type=click.File('r'),
help='Schema to validate against')
@click.option('--countries', default=None, nargs=1, type=click.File('r'),
help='JSON file with a dictionary of country code to name map')
@click.option('--host', default='localhost', nargs=1,
help='CKAN instance to upload to')
@click.option('--apikey', default=None, nargs=1,
help='CKAN user API key of uploader')
@click.option('--organization', default=None, nargs=1,
help='CKAN organisation the dataset should belong to')
@click.argument('datapackage')
def bdp2ckan(schema, countries, host, apikey, organization, datapackage):
"""
Import a budget data package into CKAN
"""
# Get the datapackage descriptor file
response = requests.get(datapackage)
descriptor = response.json()
# If a schema was provided, we validate the datapackage based on the schema
if schema is not None:
schema_obj = json.load(schema)
jsonschema.validate(descriptor, schema_obj)
# Load countries from a default location if they haven't been supplied
# Default location is data/countries.json
if countries is None:
dirname = os.path.dirname(os.path.realpath(__file__))
filename = os.path.join(dirname, "data", "countries.json")
countries = open(filename)
country_obj = json.load(countries)
countries.close()
# Extract CKAN metadata from the data package
data_dict = create_ckan_package_dict(descriptor)
if organization is not None:
data_dict['owner_org'] = organization
# Fix urls in resources because paths must be turned into urls
# because we don't support file uploads.
resources = create_ckan_resource_array(descriptor)
for resource in resources['resources']:
if 'url' in resource and not (
resource['url'].startswith('http://') or
resource['url'].startswith('https://')):
resource['url'] = urlparse.urljoin(datapackage, resource['url'])
# Add the data package descriptor file as a resource
resources['resources'].append({
'name': 'Data package',
'description': 'The descriptor file for the data package',
'url': datapackage})
# Append the resources to the package. This allows us to create resources
# at the same time as we create the package, but this limits us to linking
# to resources (hence the fix above) instead of uploading. If we want to
# upload, we need to create each resource on its own.
data_dict.update(resources)
# Add budget data package metadata as extras, this requires that the
# CKAN instance will have a schema that accepts these extras
data_dict.update(create_budget_data_package_extras(descriptor))
# Overwrite title to more descriptive of what the dataset contains
# Instead of using the data package title, we use:
# "Country | Direction | Fiscal period"
possible_title_values = [
country_obj.get(descriptor.get('countryCode', ''), None),
descriptor.get('direction', None),
descriptor.get('fiscalPeriod', None)]
data_dict['title'] = ' | '.join(
[v for v in possible_title_values if v is not None])
# Grab currencies from measures
currencies = set()
for measure in descriptor['mapping']['measures'].itervalues():
if 'currency' in measure:
currencies.add(measure['currency'])
if currencies:
if len(currencies) == 1:
currency = currencies.pop()
else:
currency = list(currencies)
data_dict['extras'].append(
{'key': 'currency', 'value': currency})
(status, message) = submit_to_ckan(host, apikey, data_dict)
if status != 200:
raise IOError(
'Unable to submit budget data package to CKAN: {0}'.format(
message)
)
if __name__ == '__main__':
bdp2ckan()
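# Example invocation (hypothetical URLs, key and organization, for illustration only):
#   python bdp2ckan.py --host http://ckan.example.org --apikey my-api-key \
#       --organization ministry-of-finance \
#       https://example.org/budgets/2014/datapackage.json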
| gpl-3.0 | -3,805,272,379,955,504,000 | 38.277778 | 80 | 0.650251 | false |
doug-fish/horizon | openstack_dashboard/dashboards/identity/projects/workflows.py | 2 | 38674 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from openstack_auth import utils as auth_utils
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon.utils import memoized
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.api import base
from openstack_dashboard.api import cinder
from openstack_dashboard.api import keystone
from openstack_dashboard.api import nova
from openstack_dashboard.usage import quotas
INDEX_URL = "horizon:identity:projects:index"
ADD_USER_URL = "horizon:identity:projects:create_user"
PROJECT_GROUP_ENABLED = keystone.VERSIONS.active >= 3
PROJECT_USER_MEMBER_SLUG = "update_members"
PROJECT_GROUP_MEMBER_SLUG = "update_group_members"
COMMON_HORIZONTAL_TEMPLATE = "identity/projects/_common_horizontal_form.html"
class ProjectQuotaAction(workflows.Action):
ifcb_label = _("Injected File Content (Bytes)")
metadata_items = forms.IntegerField(min_value=-1,
label=_("Metadata Items"))
cores = forms.IntegerField(min_value=-1, label=_("VCPUs"))
instances = forms.IntegerField(min_value=-1, label=_("Instances"))
injected_files = forms.IntegerField(min_value=-1,
label=_("Injected Files"))
injected_file_content_bytes = forms.IntegerField(min_value=-1,
label=ifcb_label)
volumes = forms.IntegerField(min_value=-1, label=_("Volumes"))
snapshots = forms.IntegerField(min_value=-1, label=_("Volume Snapshots"))
gigabytes = forms.IntegerField(
min_value=-1, label=_("Total Size of Volumes and Snapshots (GB)"))
ram = forms.IntegerField(min_value=-1, label=_("RAM (MB)"))
floating_ips = forms.IntegerField(min_value=-1, label=_("Floating IPs"))
fixed_ips = forms.IntegerField(min_value=-1, label=_("Fixed IPs"))
security_groups = forms.IntegerField(min_value=-1,
label=_("Security Groups"))
security_group_rules = forms.IntegerField(min_value=-1,
label=_("Security Group Rules"))
# Neutron
security_group = forms.IntegerField(min_value=-1,
label=_("Security Groups"))
security_group_rule = forms.IntegerField(min_value=-1,
label=_("Security Group Rules"))
floatingip = forms.IntegerField(min_value=-1, label=_("Floating IPs"))
network = forms.IntegerField(min_value=-1, label=_("Networks"))
port = forms.IntegerField(min_value=-1, label=_("Ports"))
router = forms.IntegerField(min_value=-1, label=_("Routers"))
subnet = forms.IntegerField(min_value=-1, label=_("Subnets"))
def __init__(self, request, *args, **kwargs):
super(ProjectQuotaAction, self).__init__(request,
*args,
**kwargs)
disabled_quotas = quotas.get_disabled_quotas(request)
for field in disabled_quotas:
if field in self.fields:
self.fields[field].required = False
self.fields[field].widget = forms.HiddenInput()
class UpdateProjectQuotaAction(ProjectQuotaAction):
def clean(self):
cleaned_data = super(UpdateProjectQuotaAction, self).clean()
usages = quotas.tenant_quota_usages(
self.request, tenant_id=self.initial['project_id'])
# Validate the quota values before updating quotas.
bad_values = []
for key, value in cleaned_data.items():
used = usages[key].get('used', 0)
if value is not None and value >= 0 and used > value:
bad_values.append(_('%(used)s %(key)s used') %
{'used': used,
'key': quotas.QUOTA_NAMES.get(key, key)})
if bad_values:
value_str = ", ".join(bad_values)
msg = (_('Quota value(s) cannot be less than the current usage '
'value(s): %s.') %
value_str)
raise forms.ValidationError(msg)
return cleaned_data
class Meta(object):
name = _("Quota")
slug = 'update_quotas'
help_text = _("Set maximum quotas for the project.")
class CreateProjectQuotaAction(ProjectQuotaAction):
class Meta(object):
name = _("Quota")
slug = 'create_quotas'
help_text = _("Set maximum quotas for the project.")
class UpdateProjectQuota(workflows.Step):
action_class = UpdateProjectQuotaAction
template_name = COMMON_HORIZONTAL_TEMPLATE
depends_on = ("project_id",)
contributes = quotas.QUOTA_FIELDS
class CreateProjectQuota(workflows.Step):
action_class = CreateProjectQuotaAction
template_name = COMMON_HORIZONTAL_TEMPLATE
depends_on = ("project_id",)
contributes = quotas.QUOTA_FIELDS
class CreateProjectInfoAction(workflows.Action):
# Hide the domain_id and domain_name by default
domain_id = forms.CharField(label=_("Domain ID"),
required=False,
widget=forms.HiddenInput())
domain_name = forms.CharField(label=_("Domain Name"),
required=False,
widget=forms.HiddenInput())
name = forms.CharField(label=_("Name"),
max_length=64)
description = forms.CharField(widget=forms.widgets.Textarea(
attrs={'rows': 4}),
label=_("Description"),
required=False)
enabled = forms.BooleanField(label=_("Enabled"),
required=False,
initial=True)
def __init__(self, request, *args, **kwargs):
super(CreateProjectInfoAction, self).__init__(request,
*args,
**kwargs)
# For keystone V3, display the two fields in read-only
if keystone.VERSIONS.active >= 3:
readonlyInput = forms.TextInput(attrs={'readonly': 'readonly'})
self.fields["domain_id"].widget = readonlyInput
self.fields["domain_name"].widget = readonlyInput
def clean_name(self):
project_name = self.cleaned_data['name']
domain_id = self.cleaned_data['domain_id']
# Due to potential performance issues project name validation
# for the keystone.v2 is omitted
try:
if keystone.VERSIONS.active >= 3:
tenant = api.keystone.tenant_list(
self.request,
domain=domain_id,
filters={'name': project_name})
if tenant:
msg = _('Project name is already in use. Please use a '
'different name.')
raise forms.ValidationError(msg)
except Exception:
exceptions.handle(self.request, ignore=True)
return project_name
class Meta(object):
name = _("Project Information")
help_text = _("Create a project to organize users.")
class CreateProjectInfo(workflows.Step):
action_class = CreateProjectInfoAction
template_name = COMMON_HORIZONTAL_TEMPLATE
contributes = ("domain_id",
"domain_name",
"project_id",
"name",
"description",
"enabled")
class UpdateProjectMembersAction(workflows.MembershipAction):
def __init__(self, request, *args, **kwargs):
super(UpdateProjectMembersAction, self).__init__(request,
*args,
**kwargs)
err_msg = _('Unable to retrieve user list. Please try again later.')
# Use the domain_id from the project
domain_id = self.initial.get("domain_id", None)
project_id = ''
if 'project_id' in self.initial:
project_id = self.initial['project_id']
# Get the default role
try:
default_role = api.keystone.get_default_role(self.request)
# Default role is necessary to add members to a project
if default_role is None:
default = getattr(settings,
"OPENSTACK_KEYSTONE_DEFAULT_ROLE", None)
msg = (_('Could not find default role "%s" in Keystone') %
default)
raise exceptions.NotFound(msg)
except Exception:
exceptions.handle(self.request,
err_msg,
redirect=reverse(INDEX_URL))
default_role_name = self.get_default_role_field_name()
self.fields[default_role_name] = forms.CharField(required=False)
self.fields[default_role_name].initial = default_role.id
# Get list of available users
all_users = []
try:
all_users = api.keystone.user_list(request,
domain=domain_id)
except Exception:
exceptions.handle(request, err_msg)
users_list = [(user.id, user.name) for user in all_users]
# Get list of roles
role_list = []
try:
role_list = api.keystone.role_list(request)
except Exception:
exceptions.handle(request,
err_msg,
redirect=reverse(INDEX_URL))
for role in role_list:
field_name = self.get_member_field_name(role.id)
label = role.name
self.fields[field_name] = forms.MultipleChoiceField(required=False,
label=label)
self.fields[field_name].choices = users_list
self.fields[field_name].initial = []
# Figure out users & roles
if project_id:
try:
users_roles = api.keystone.get_project_users_roles(request,
project_id)
except Exception:
exceptions.handle(request,
err_msg,
redirect=reverse(INDEX_URL))
for user_id in users_roles:
roles_ids = users_roles[user_id]
for role_id in roles_ids:
field_name = self.get_member_field_name(role_id)
self.fields[field_name].initial.append(user_id)
class Meta(object):
name = _("Project Members")
slug = PROJECT_USER_MEMBER_SLUG
class UpdateProjectMembers(workflows.UpdateMembersStep):
action_class = UpdateProjectMembersAction
available_list_title = _("All Users")
members_list_title = _("Project Members")
no_available_text = _("No users found.")
no_members_text = _("No users.")
def contribute(self, data, context):
if data:
try:
roles = api.keystone.role_list(self.workflow.request)
except Exception:
exceptions.handle(self.workflow.request,
_('Unable to retrieve user list.'))
post = self.workflow.request.POST
for role in roles:
field = self.get_member_field_name(role.id)
context[field] = post.getlist(field)
return context
class UpdateProjectGroupsAction(workflows.MembershipAction):
def __init__(self, request, *args, **kwargs):
super(UpdateProjectGroupsAction, self).__init__(request,
*args,
**kwargs)
err_msg = _('Unable to retrieve group list. Please try again later.')
# Use the domain_id from the project
domain_id = self.initial.get("domain_id", None)
project_id = ''
if 'project_id' in self.initial:
project_id = self.initial['project_id']
# Get the default role
try:
default_role = api.keystone.get_default_role(self.request)
# Default role is necessary to add members to a project
if default_role is None:
default = getattr(settings,
"OPENSTACK_KEYSTONE_DEFAULT_ROLE", None)
msg = (_('Could not find default role "%s" in Keystone') %
default)
raise exceptions.NotFound(msg)
except Exception:
exceptions.handle(self.request,
err_msg,
redirect=reverse(INDEX_URL))
default_role_name = self.get_default_role_field_name()
self.fields[default_role_name] = forms.CharField(required=False)
self.fields[default_role_name].initial = default_role.id
# Get list of available groups
all_groups = []
try:
all_groups = api.keystone.group_list(request,
domain=domain_id)
except Exception:
exceptions.handle(request, err_msg)
groups_list = [(group.id, group.name) for group in all_groups]
# Get list of roles
role_list = []
try:
role_list = api.keystone.role_list(request)
except Exception:
exceptions.handle(request,
err_msg,
redirect=reverse(INDEX_URL))
for role in role_list:
field_name = self.get_member_field_name(role.id)
label = role.name
self.fields[field_name] = forms.MultipleChoiceField(required=False,
label=label)
self.fields[field_name].choices = groups_list
self.fields[field_name].initial = []
# Figure out groups & roles
if project_id:
try:
groups_roles = api.keystone.get_project_groups_roles(
request, project_id)
except Exception:
exceptions.handle(request,
err_msg,
redirect=reverse(INDEX_URL))
for group_id in groups_roles:
roles_ids = groups_roles[group_id]
for role_id in roles_ids:
field_name = self.get_member_field_name(role_id)
self.fields[field_name].initial.append(group_id)
class Meta(object):
name = _("Project Groups")
slug = PROJECT_GROUP_MEMBER_SLUG
class UpdateProjectGroups(workflows.UpdateMembersStep):
action_class = UpdateProjectGroupsAction
available_list_title = _("All Groups")
members_list_title = _("Project Groups")
no_available_text = _("No groups found.")
no_members_text = _("No groups.")
def contribute(self, data, context):
if data:
try:
roles = api.keystone.role_list(self.workflow.request)
except Exception:
exceptions.handle(self.workflow.request,
_('Unable to retrieve role list.'))
post = self.workflow.request.POST
for role in roles:
field = self.get_member_field_name(role.id)
context[field] = post.getlist(field)
return context
class CommonQuotaWorkflow(workflows.Workflow):
def _update_project_quota(self, request, data, project_id):
# Update the project quota.
nova_data = dict(
[(key, data[key]) for key in quotas.NOVA_QUOTA_FIELDS])
nova.tenant_quota_update(request, project_id, **nova_data)
if base.is_service_enabled(request, 'volume'):
cinder_data = dict([(key, data[key]) for key in
quotas.CINDER_QUOTA_FIELDS])
cinder.tenant_quota_update(request,
project_id,
**cinder_data)
if api.base.is_service_enabled(request, 'network') and \
api.neutron.is_quotas_extension_supported(request):
neutron_data = {}
disabled_quotas = quotas.get_disabled_quotas(request)
for key in quotas.NEUTRON_QUOTA_FIELDS:
if key not in disabled_quotas:
neutron_data[key] = data[key]
api.neutron.tenant_quota_update(request,
project_id,
**neutron_data)
class CreateProject(CommonQuotaWorkflow):
slug = "create_project"
name = _("Create Project")
finalize_button_name = _("Create Project")
success_message = _('Created new project "%s".')
failure_message = _('Unable to create project "%s".')
success_url = "horizon:identity:projects:index"
default_steps = (CreateProjectInfo,
UpdateProjectMembers,
CreateProjectQuota)
def __init__(self, request=None, context_seed=None, entry_point=None,
*args, **kwargs):
if PROJECT_GROUP_ENABLED:
self.default_steps = (CreateProjectInfo,
UpdateProjectMembers,
UpdateProjectGroups,
CreateProjectQuota)
super(CreateProject, self).__init__(request=request,
context_seed=context_seed,
entry_point=entry_point,
*args,
**kwargs)
def format_status_message(self, message):
if "%s" in message:
return message % self.context.get('name', 'unknown project')
else:
return message
def _create_project(self, request, data):
# create the project
domain_id = data['domain_id']
try:
desc = data['description']
self.object = api.keystone.tenant_create(request,
name=data['name'],
description=desc,
enabled=data['enabled'],
domain=domain_id)
return self.object
except exceptions.Conflict:
msg = _('Project name "%s" is already used.') % data['name']
self.failure_message = msg
return
except Exception:
exceptions.handle(request, ignore=True)
return
def _update_project_members(self, request, data, project_id):
# update project members
users_to_add = 0
try:
available_roles = api.keystone.role_list(request)
member_step = self.get_step(PROJECT_USER_MEMBER_SLUG)
# count how many users are to be added
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
role_list = data[field_name]
users_to_add += len(role_list)
# add new users to project
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
role_list = data[field_name]
users_added = 0
for user in role_list:
api.keystone.add_tenant_user_role(request,
project=project_id,
user=user,
role=role.id)
users_added += 1
users_to_add -= users_added
except Exception:
if PROJECT_GROUP_ENABLED:
group_msg = _(", add project groups")
else:
group_msg = ""
exceptions.handle(request,
_('Failed to add %(users_to_add)s project '
'members%(group_msg)s and set project quotas.')
% {'users_to_add': users_to_add,
'group_msg': group_msg})
finally:
auth_utils.remove_project_cache(request.user.token.unscoped_token)
def _update_project_groups(self, request, data, project_id):
# update project groups
groups_to_add = 0
try:
available_roles = api.keystone.role_list(request)
member_step = self.get_step(PROJECT_GROUP_MEMBER_SLUG)
# count how many groups are to be added
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
role_list = data[field_name]
groups_to_add += len(role_list)
# add new groups to project
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
role_list = data[field_name]
groups_added = 0
for group in role_list:
api.keystone.add_group_role(request,
role=role.id,
group=group,
project=project_id)
groups_added += 1
groups_to_add -= groups_added
except Exception:
exceptions.handle(request,
_('Failed to add %s project groups '
'and update project quotas.')
% groups_to_add)
def _update_project_quota(self, request, data, project_id):
try:
super(CreateProject, self)._update_project_quota(
request, data, project_id)
except Exception:
exceptions.handle(request, _('Unable to set project quotas.'))
def handle(self, request, data):
project = self._create_project(request, data)
if not project:
return False
project_id = project.id
self._update_project_members(request, data, project_id)
if PROJECT_GROUP_ENABLED:
self._update_project_groups(request, data, project_id)
self._update_project_quota(request, data, project_id)
return True
class UpdateProjectInfoAction(CreateProjectInfoAction):
enabled = forms.BooleanField(required=False, label=_("Enabled"))
def __init__(self, request, initial, *args, **kwargs):
super(UpdateProjectInfoAction, self).__init__(
request, initial, *args, **kwargs)
if initial['project_id'] == request.user.project_id:
self.fields['enabled'].widget.attrs['disabled'] = True
self.fields['enabled'].help_text = _(
'You cannot disable your current project')
def clean(self):
cleaned_data = super(UpdateProjectInfoAction, self).clean()
# NOTE(tsufiev): in case the current project is being edited, its
# 'enabled' field is disabled to prevent changing the field value
# which is always `True` for the current project (because the user
# logged in it). Since Django treats disabled checkbox as providing
# `False` value even if its initial value is `True`, we need to
# restore the original `True` value of 'enabled' field here.
if self.fields['enabled'].widget.attrs.get('disabled', False):
cleaned_data['enabled'] = True
return cleaned_data
def clean_name(self):
project_name = self.cleaned_data['name']
if self.initial['name'] == project_name:
return project_name
return super(UpdateProjectInfoAction, self).clean_name()
class Meta(object):
name = _("Project Information")
slug = 'update_info'
help_text = _("Edit the project details.")
class UpdateProjectInfo(workflows.Step):
action_class = UpdateProjectInfoAction
template_name = COMMON_HORIZONTAL_TEMPLATE
depends_on = ("project_id",)
contributes = ("domain_id",
"domain_name",
"name",
"description",
"enabled")
class UpdateProject(CommonQuotaWorkflow):
slug = "update_project"
name = _("Edit Project")
finalize_button_name = _("Save")
success_message = _('Modified project "%s".')
failure_message = _('Unable to modify project "%s".')
success_url = "horizon:identity:projects:index"
default_steps = (UpdateProjectInfo,
UpdateProjectMembers,
UpdateProjectQuota)
def __init__(self, request=None, context_seed=None, entry_point=None,
*args, **kwargs):
if PROJECT_GROUP_ENABLED:
self.default_steps = (UpdateProjectInfo,
UpdateProjectMembers,
UpdateProjectGroups,
UpdateProjectQuota)
super(UpdateProject, self).__init__(request=request,
context_seed=context_seed,
entry_point=entry_point,
*args,
**kwargs)
def format_status_message(self, message):
if "%s" in message:
return message % self.context.get('name', 'unknown project')
else:
return message
@memoized.memoized_method
def _get_available_roles(self, request):
return api.keystone.role_list(request)
def _update_project(self, request, data):
# update project info
try:
project_id = data['project_id']
return api.keystone.tenant_update(
request,
project_id,
name=data['name'],
description=data['description'],
enabled=data['enabled'])
except exceptions.Conflict:
msg = _('Project name "%s" is already used.') % data['name']
self.failure_message = msg
return
except Exception:
exceptions.handle(request, ignore=True)
return
def _add_roles_to_users(self, request, data, project_id, user_id,
role_ids, available_roles):
member_step = self.get_step(PROJECT_USER_MEMBER_SLUG)
current_role_ids = list(role_ids)
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
# Check if the user is in the list of users with this role.
if user_id in data[field_name]:
# Add it if necessary
if role.id not in current_role_ids:
# user role has changed
api.keystone.add_tenant_user_role(
request,
project=project_id,
user=user_id,
role=role.id)
else:
# User role is unchanged, so remove it from the
# remaining roles list to avoid removing it later.
index = current_role_ids.index(role.id)
current_role_ids.pop(index)
return current_role_ids
def _remove_roles_from_user(self, request, project_id, user_id,
current_role_ids):
for id_to_delete in current_role_ids:
api.keystone.remove_tenant_user_role(
request,
project=project_id,
user=user_id,
role=id_to_delete)
def _is_removing_self_admin_role(self, request, project_id, user_id,
available_roles, current_role_ids):
is_current_user = user_id == request.user.id
is_current_project = project_id == request.user.tenant_id
available_admin_role_ids = [role.id for role in available_roles
if role.name.lower() == 'admin']
admin_roles = [role for role in current_role_ids
if role in available_admin_role_ids]
if len(admin_roles):
removing_admin = any([role in current_role_ids
for role in admin_roles])
else:
removing_admin = False
if is_current_user and is_current_project and removing_admin:
# Cannot remove "admin" role on current(admin) project
msg = _('You cannot revoke your administrative privileges '
'from the project you are currently logged into. '
'Please switch to another project with '
'administrative privileges or remove the '
'administrative role manually via the CLI.')
messages.warning(request, msg)
return True
else:
return False
def _update_project_members(self, request, data, project_id):
# update project members
users_to_modify = 0
# Project-user member step
member_step = self.get_step(PROJECT_USER_MEMBER_SLUG)
try:
# Get our role options
available_roles = self._get_available_roles(request)
# Get the users currently associated with this project so we
# can diff against it.
users_roles = api.keystone.get_project_users_roles(
request, project=project_id)
users_to_modify = len(users_roles)
for user_id in users_roles.keys():
# Check if there have been any changes in the roles of
# Existing project members.
current_role_ids = list(users_roles[user_id])
modified_role_ids = self._add_roles_to_users(
request, data, project_id, user_id,
current_role_ids, available_roles)
# Prevent admins from doing stupid things to themselves.
removing_admin = self._is_removing_self_admin_role(
request, project_id, user_id, available_roles,
modified_role_ids)
# Otherwise go through and revoke any removed roles.
if not removing_admin:
self._remove_roles_from_user(request, project_id, user_id,
modified_role_ids)
users_to_modify -= 1
# Grant new roles on the project.
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
# Count how many users may be added for exception handling.
users_to_modify += len(data[field_name])
for role in available_roles:
users_added = 0
field_name = member_step.get_member_field_name(role.id)
for user_id in data[field_name]:
if user_id not in users_roles:
api.keystone.add_tenant_user_role(request,
project=project_id,
user=user_id,
role=role.id)
users_added += 1
users_to_modify -= users_added
return True
except Exception:
if PROJECT_GROUP_ENABLED:
group_msg = _(", update project groups")
else:
group_msg = ""
exceptions.handle(request,
_('Failed to modify %(users_to_modify)s'
' project members%(group_msg)s and '
'update project quotas.')
% {'users_to_modify': users_to_modify,
'group_msg': group_msg})
return False
finally:
auth_utils.remove_project_cache(request.user.token.unscoped_token)
def _update_project_groups(self, request, data, project_id, domain_id):
# update project groups
groups_to_modify = 0
member_step = self.get_step(PROJECT_GROUP_MEMBER_SLUG)
try:
available_roles = self._get_available_roles(request)
# Get the groups currently associated with this project so we
# can diff against it.
project_groups = api.keystone.group_list(request,
domain=domain_id,
project=project_id)
groups_to_modify = len(project_groups)
for group in project_groups:
# Check if there have been any changes in the roles of
                # existing project members.
current_roles = api.keystone.roles_for_group(
self.request,
group=group.id,
project=project_id)
current_role_ids = [role.id for role in current_roles]
for role in available_roles:
# Check if the group is in the list of groups with
# this role.
field_name = member_step.get_member_field_name(role.id)
if group.id in data[field_name]:
# Add it if necessary
if role.id not in current_role_ids:
# group role has changed
api.keystone.add_group_role(
request,
role=role.id,
group=group.id,
project=project_id)
else:
# Group role is unchanged, so remove it from
# the remaining roles list to avoid removing it
# later.
index = current_role_ids.index(role.id)
current_role_ids.pop(index)
# Revoke any removed roles.
for id_to_delete in current_role_ids:
api.keystone.remove_group_role(request,
role=id_to_delete,
group=group.id,
project=project_id)
groups_to_modify -= 1
# Grant new roles on the project.
for role in available_roles:
field_name = member_step.get_member_field_name(role.id)
# Count how many groups may be added for error handling.
groups_to_modify += len(data[field_name])
for role in available_roles:
groups_added = 0
field_name = member_step.get_member_field_name(role.id)
for group_id in data[field_name]:
if not filter(lambda x: group_id == x.id,
project_groups):
api.keystone.add_group_role(request,
role=role.id,
group=group_id,
project=project_id)
groups_added += 1
groups_to_modify -= groups_added
return True
except Exception:
exceptions.handle(request,
_('Failed to modify %s project '
'members, update project groups '
'and update project quotas.')
% groups_to_modify)
return False
def _update_project_quota(self, request, data, project_id):
try:
super(UpdateProject, self)._update_project_quota(
request, data, project_id)
return True
except Exception:
exceptions.handle(request, _('Modified project information and '
'members, but unable to modify '
'project quotas.'))
return False
def handle(self, request, data):
# FIXME(gabriel): This should be refactored to use Python's built-in
# sets and do this all in a single "roles to add" and "roles to remove"
# pass instead of the multi-pass thing happening now.
project = self._update_project(request, data)
if not project:
return False
project_id = data['project_id']
# Use the domain_id from the project if available
domain_id = getattr(project, "domain_id", '')
ret = self._update_project_members(request, data, project_id)
if not ret:
return False
if PROJECT_GROUP_ENABLED:
ret = self._update_project_groups(request, data,
project_id, domain_id)
if not ret:
return False
ret = self._update_project_quota(request, data, project_id)
if not ret:
return False
return True
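    # A possible shape for the refactor suggested in the FIXME in handle()
    # above -- a hedged sketch only, not this project's implementation. With
    # sets, the per-user role diff could be computed in a single pass:
    #
    #   desired = {role.id for role in available_roles
    #              if user_id in data[member_step.get_member_field_name(role.id)]}
    #   current = set(users_roles.get(user_id, []))
    #   roles_to_add = desired - current
    #   roles_to_remove = current - desired
    #
    # available_roles, member_step, data and users_roles are the same objects
    # already used in _update_project_members() above.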
| apache-2.0 | 3,393,413,942,795,748,000 | 42.307951 | 79 | 0.524202 | false |
PEAT-AI/Crampy | speech_recognition/minimal.py | 1 | 1307 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# untitled.py
#
# Copyright 2014 linaro <linaro@cubietruck>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import subprocess
from espeak import espeak
# module-level strings reassigned by test(); initialised as empty strings
# rather than being bound to the str type
a = ""
b = ""
def execute_unix(inputcommand):
p = subprocess.Popen(inputcommand, stdout=subprocess.PIPE, shell=True)
(output, err) = p.communicate()
return output
def test():
global a, b
a = "this is a speech test"
b = 'espeak --stdout > myaudio -ven+f3 -k2 -s150 --punct="<characters>" "%s" 2>>/dev/null' % a
b = 'espeak -f "%s" --stdout > myaudio' % a
execute_unix(b)
return b
test()
| gpl-3.0 | 8,081,347,438,708,105,000 | 28.044444 | 95 | 0.696251 | false |
umitproject/packet-manipulator | umit/pm/backend/umpa/context/send.py | 2 | 3710 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008 Adriano Monteiro Marques
#
# Author: Francesco Piccinno <stack.box@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from umit.pm.backend.umpa import send_packet
from umit.pm.core.i18n import _
def register_send_context(BaseSendContext):
class SendContext(BaseSendContext):
def __init__(self, metapacket, count, inter, callback, udata):
BaseSendContext.__init__(self, metapacket, count, inter, callback, udata)
self.thread = None
self.internal = False
def _start(self):
if self.tot_count - self.count > 0:
self.state = self.RUNNING
self.internal = True
self.thread = send_packet(self.packet, self.tot_count - self.count, self.inter, \
self.__send_callback, self.udata)
return True
return False
def _resume(self):
if self.thread and self.thread.isAlive():
return False
return self._start()
def _restart(self):
if self.thread and self.thread.isAlive():
return False
self.count = 0
return self._start()
def _stop(self):
self.internal = False
return True
_pause = _stop
def __send_callback(self, packet, udata):
if packet and isinstance(packet, Exception):
self.internal = False
self.summary = str(packet)
else:
if packet:
self.count += 1
else:
self.state = self.NOT_RUNNING
if self.count == self.tot_count:
self.summary = _("%d packet(s) sent.") % self.tot_count
else:
self.summary = _("Sending packet %d of %d") % (self.count, self.tot_count)
self.percentage = float(self.count) / float(self.tot_count) * 100.0
if self.callback:
self.callback(packet, udata)
if not self.internal:
self.state = self.NOT_RUNNING
return self.state == self.NOT_RUNNING or \
self.state == self.PAUSED
#def pause(self):
# BaseSendContext.pause(self)
# self.thread.join()
#def stop(self):
# BaseSendContext.stop(self)
# self.thread.join()
def join(self):
self.thread.join()
self.running = False
return SendContext
| gpl-2.0 | 1,374,351,007,686,992,100 | 36.1 | 97 | 0.504313 | false |
mozilla/treeherder | treeherder/webapp/api/performance_serializers.py | 2 | 13497 | import decimal
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from rest_framework import exceptions, serializers
from treeherder.model.models import Repository
from treeherder.perf.models import (
BackfillRecord,
IssueTracker,
PerformanceAlert,
PerformanceAlertSummary,
PerformanceBugTemplate,
PerformanceDatum,
PerformanceFramework,
PerformanceSignature,
PerformanceTag,
)
from treeherder.webapp.api.utils import to_timestamp
class OptionalBooleanField(serializers.BooleanField):
def __init__(self, *args, **kwargs):
kwargs['default'] = False
super().__init__(*args, **kwargs)
class PerformanceDecimalField(serializers.DecimalField):
def __init__(self, *args, **kwargs):
kwargs['max_digits'] = 20
kwargs['decimal_places'] = 2
kwargs['coerce_to_string'] = False
super().__init__(*args, **kwargs)
class TimestampField(serializers.Field):
def to_representation(self, value):
return to_timestamp(value.time)
class WordsField(serializers.CharField):
def to_representation(self, obj):
# if string's value is blank, just return nothing
if isinstance(obj, str):
return obj.split(' ')
return []
class BackfillRecordSerializer(serializers.Serializer):
context = serializers.JSONField()
class Meta:
model = BackfillRecord
fields = ('alert', 'context')
class PerformanceFrameworkSerializer(serializers.ModelSerializer):
class Meta:
model = PerformanceFramework
fields = ['id', 'name']
class PerformanceSignatureSerializer(serializers.ModelSerializer):
option_collection_hash = serializers.SlugRelatedField(
read_only=True, slug_field="option_collection_hash", source="option_collection"
)
machine_platform = serializers.SlugRelatedField(
read_only=True, slug_field="platform", source="platform"
)
tags = WordsField(read_only=True, allow_blank=True)
extra_options = WordsField(read_only=True, allow_blank=True)
measurement_unit = serializers.CharField(read_only=True)
suite_public_name = serializers.CharField(read_only=True, required=False)
test_public_name = serializers.CharField(read_only=True, required=False)
class Meta:
model = PerformanceSignature
fields = [
'id',
'framework_id',
'signature_hash',
'machine_platform',
'suite',
'test',
'lower_is_better',
'has_subtests',
'option_collection_hash',
'tags',
'extra_options',
'measurement_unit',
'suite_public_name',
'test_public_name',
]
class PerformanceAlertSerializer(serializers.ModelSerializer):
series_signature = PerformanceSignatureSerializer(read_only=True)
summary_id = serializers.SlugRelatedField(
slug_field="id",
source="summary",
required=False,
queryset=PerformanceAlertSummary.objects.all(),
)
related_summary_id = serializers.SlugRelatedField(
slug_field="id",
source="related_summary",
allow_null=True,
required=False,
queryset=PerformanceAlertSummary.objects.all(),
)
classifier = serializers.SlugRelatedField(
slug_field="username", allow_null=True, required=False, queryset=User.objects.all()
)
classifier_email = serializers.SerializerMethodField()
backfill_record = BackfillRecordSerializer(read_only=True, allow_null=True)
# Force `is_regression` to be an optional field, even when using PUT, since in
# Django 2.1 BooleanField no longer has an implicit `blank=True` on the model.
# TODO: Switch to using PATCH instead in the UI and the API tests.
is_regression = serializers.BooleanField(required=False)
# express quantities in terms of decimals to save space
amount_abs = PerformanceDecimalField(read_only=True)
amount_pct = PerformanceDecimalField(read_only=True)
t_value = PerformanceDecimalField(read_only=True)
prev_value = PerformanceDecimalField(read_only=True)
new_value = PerformanceDecimalField(read_only=True)
@transaction.atomic
def update(self, instance, validated_data):
# ensure the related summary, if set, has the same repository and
# framework as the original summary
related_summary = validated_data.get('related_summary')
if related_summary:
if (
validated_data.get('status', instance.status) != PerformanceAlert.DOWNSTREAM
and instance.summary.repository_id != related_summary.repository_id
):
raise exceptions.ValidationError(
"New summary's repository ({}) does not match existing "
"summary's repository ({})".format(
related_summary.repository, instance.summary.framework
)
)
elif instance.summary.framework_id != related_summary.framework_id:
raise exceptions.ValidationError(
"New summary's framework ({}) does not match existing "
"summary's framework ({})".format(
related_summary.framework, instance.summary.framework
)
)
status = validated_data.get('status')
if status and status in PerformanceAlert.RELATIONAL_STATUS_IDS:
# we've caught a downstream/reassignment: timestamp it
related_summary.timestamp_first_triage().save()
instance.timestamp_first_triage()
return super().update(instance, validated_data)
def get_classifier_email(self, performance_alert):
return getattr(performance_alert.classifier, 'email', None)
class Meta:
model = PerformanceAlert
fields = [
'id',
'status',
'series_signature',
'is_regression',
'prev_value',
'new_value',
't_value',
'amount_abs',
'amount_pct',
'summary_id',
'related_summary_id',
'manually_created',
'classifier',
'starred',
'classifier_email',
'backfill_record',
]
class PerformanceTagSerializer(serializers.ModelSerializer):
name = serializers.CharField(read_only=True)
class Meta:
model = PerformanceTag
fields = ['id', 'name']
class PerformanceAlertSummarySerializer(serializers.ModelSerializer):
alerts = PerformanceAlertSerializer(many=True, read_only=True)
related_alerts = PerformanceAlertSerializer(many=True, read_only=True)
performance_tags = serializers.SlugRelatedField(
many=True, required=False, slug_field='name', queryset=PerformanceTag.objects.all()
)
repository = serializers.SlugRelatedField(read_only=True, slug_field='name')
framework = serializers.SlugRelatedField(read_only=True, slug_field='id')
revision = serializers.SlugRelatedField(read_only=True, slug_field='revision', source='push')
push_timestamp = TimestampField(source='push', read_only=True)
prev_push_revision = serializers.SlugRelatedField(
read_only=True, slug_field='revision', source='prev_push'
)
assignee_username = serializers.SlugRelatedField(
slug_field="username",
source="assignee",
allow_null=True,
required=False,
queryset=User.objects.all(),
)
assignee_email = serializers.SerializerMethodField()
# marking these fields as readonly, the user should not be modifying them
# (after the item is first created, where we don't use this serializer
# class)
prev_push_id = serializers.ReadOnlyField()
push_id = serializers.ReadOnlyField()
created = serializers.ReadOnlyField()
def update(self, instance, validated_data):
instance.timestamp_first_triage()
return super().update(instance, validated_data)
def get_assignee_email(self, performance_alert_summary):
return getattr(performance_alert_summary.assignee, 'email', None)
class Meta:
model = PerformanceAlertSummary
fields = [
'id',
'push_id',
'prev_push_id',
'created',
'repository',
'framework',
'alerts',
'related_alerts',
'status',
'bug_number',
'bug_updated',
'issue_tracker',
'notes',
'revision',
'push_timestamp',
'prev_push_revision',
'assignee_username',
'assignee_email',
'performance_tags',
]
class PerformanceBugTemplateSerializer(serializers.ModelSerializer):
framework = serializers.SlugRelatedField(read_only=True, slug_field='id')
class Meta:
model = PerformanceBugTemplate
fields = [
'framework',
'keywords',
'status_whiteboard',
'default_component',
'default_product',
'cc_list',
'text',
]
class IssueTrackerSerializer(serializers.ModelSerializer):
text = serializers.CharField(read_only=True, source='name')
issueTrackerUrl = serializers.URLField(read_only=True, source='task_base_url')
class Meta:
model = IssueTracker
fields = ['id', 'text', 'issueTrackerUrl']
class PerformanceQueryParamsSerializer(serializers.Serializer):
startday = serializers.DateTimeField(required=False, allow_null=True, default=None)
endday = serializers.DateTimeField(required=False, allow_null=True, default=None)
revision = serializers.CharField(required=False, allow_null=True, default=None)
repository = serializers.CharField()
framework = serializers.ListField(required=False, child=serializers.IntegerField(), default=[])
interval = serializers.IntegerField(required=False, allow_null=True, default=None)
parent_signature = serializers.CharField(required=False, allow_null=True, default=None)
signature = serializers.CharField(required=False, allow_null=True, default=None)
no_subtests = serializers.BooleanField(required=False)
all_data = OptionalBooleanField()
no_retriggers = OptionalBooleanField()
def validate(self, data):
if (
data['revision'] is None
and data['interval'] is None
and (data['startday'] is None or data['endday'] is None)
):
raise serializers.ValidationError(
'Required: revision, startday and endday or interval.'
)
return data
def validate_repository(self, repository):
try:
Repository.objects.get(name=repository)
except ObjectDoesNotExist:
raise serializers.ValidationError('{} does not exist.'.format(repository))
return repository
class PerformanceDatumSerializer(serializers.ModelSerializer):
revision = serializers.CharField(source='push__revision')
class Meta:
model = PerformanceDatum
fields = ['job_id', 'id', 'value', 'push_timestamp', 'push_id', 'revision']
class PerformanceSummarySerializer(serializers.ModelSerializer):
platform = serializers.CharField(source="platform__platform")
values = serializers.ListField(
child=serializers.DecimalField(
rounding=decimal.ROUND_HALF_EVEN,
decimal_places=2,
max_digits=None,
coerce_to_string=False,
),
default=[],
)
name = serializers.SerializerMethodField()
suite = serializers.CharField()
parent_signature = serializers.IntegerField(source="parent_signature_id")
signature_id = serializers.IntegerField(source="id")
job_ids = serializers.ListField(child=serializers.IntegerField(), default=[])
data = PerformanceDatumSerializer(read_only=True, many=True, default=[])
repository_name = serializers.CharField()
class Meta:
model = PerformanceSignature
fields = [
'signature_id',
'framework_id',
'signature_hash',
'platform',
'test',
'suite',
'lower_is_better',
'has_subtests',
'tags',
'values',
'name',
'parent_signature',
'job_ids',
'repository_name',
'repository_id',
'data',
'measurement_unit',
'application',
]
def get_name(self, value):
test = value['test']
suite = value['suite']
test_suite = suite if test == '' or test == suite else '{} {}'.format(suite, test)
return '{} {} {}'.format(test_suite, value['option_name'], value['extra_options'])
class TestSuiteHealthParamsSerializer(serializers.Serializer):
framework = serializers.CharField(default=None)
class CommaSeparatedField(serializers.Field):
def to_representation(self, value):
return value.split(',')
class TestSuiteHealthSerializer(serializers.Serializer):
test = serializers.CharField()
suite = serializers.CharField()
platforms = CommaSeparatedField()
repositories = CommaSeparatedField()
total_alerts = serializers.IntegerField()
| mpl-2.0 | 1,615,358,247,940,246,500 | 33.875969 | 99 | 0.636586 | false |
smithev/check_AMQP | check_amqp.py | 1 | 5533 | #!/usr/bin/python2.6
'''
Software License Agreement (BSD License)
Copyright (c) 2013, Smith Electric Vehicles (Europe) Ltd
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Dependencies:
py-amqplib : http://code.google.com/p/py-amqplib/
'''
from amqplib import client_0_8 as amqp
import sys
import random
import time
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--host", action="store", type="string", dest="host", default="localhost")
parser.add_option("--port", action="store", type="int", dest="port", default=5672)
parser.add_option("--ssl", action="store_true", dest="ssl", default=False)
parser.add_option("--vhost", action="store", type="string", dest="vhost", default="/")
parser.add_option("--queue", action="store", type="string", dest="queue", default="monitoring_queue")
parser.add_option("--user", action="store", type="string", dest="user", default="guest")
parser.add_option("--password", action="store", type="string", dest="password", default="guest")
parser.add_option("--critical", action="store", type="float", dest="critical", metavar="SECONDS", default=4.0)
parser.add_option("--warning", action="store", type="float", dest="warning", metavar="SECONDS", default=2.0)
(options, args) = parser.parse_args(sys.argv)
# Connection details go here
amqpServer = "%s:%i" % (options.host, options.port)
amqpQueue = "%s" % options.queue
amqpVhost = options.vhost
amqpSsl = options.ssl
amqpUid = options.user
amqpPass = options.password
# Number of seconds before message is considered timed out
timeout = options.critical
# Number of seconds before the received message is considered late and a warning is raised
receivedTimeWarning = options.warning
# Function to check the header of a received message. If its ID matches the sent message,
# the function checks the time it took to arrive and exits with the appropriate state. If the message does not
# match the sent message ID it is discarded and polling continues.
def receive_callback(msg):
recTime = time.time()
recMessageID = msg.application_headers['messID']
timeDiff = recTime - sendTime
if recMessageID == messageID:
amqpChan.close()
amqpConn.close()
if timeDiff > timeout:
print "CRITICAL - Test message received in %s seconds|roundtrip=%s" % (timeDiff, timeDiff)
sys.exit(2)
if timeDiff > receivedTimeWarning:
print "WARNING - Test message received in %s seconds|roundtrip=%s" % (timeDiff, timeDiff)
sys.exit(1)
if timeDiff < receivedTimeWarning:
print "OK - Test message received in %s seconds|roundtrip=%s" % (timeDiff, timeDiff)
sys.exit(0)
pull_message()
# Function to pull a single message from the queue and continue checking for messages until the timeout is reached
def pull_message():
slept = 0
sleepInterval = 0.1
while slept < timeout:
msg = amqpChan.basic_get(amqpQueue)
if msg is not None:
amqpChan.basic_ack(msg.delivery_tag)
receive_callback(msg)
time.sleep(sleepInterval)
slept += sleepInterval
print "Timeout (%s seconds) expired while waiting for test message." % timeout
amqpChan.close()
amqpConn.close()
sys.exit(2)
# Attempt to connect to the AMQP resource. If the connection fails the script exits with a critical exit status
#try:
amqpConn = amqp.Connection(host=amqpServer, userid=amqpUid, password=amqpPass, virtual_host=amqpVhost, insist=False, ssl=amqpSsl)
amqpChan = amqpConn.channel()
amqpChan.queue_declare(queue=amqpQueue, durable=True, auto_delete=False)
amqpChan.exchange_declare(exchange=amqpQueue, type="direct", durable=True, auto_delete=False)
amqpChan.queue_bind(queue=amqpQueue, exchange=amqpQueue, routing_key=amqpQueue)
# Generating a random message ID and sending a single message
messageID = str(random.randint(1, 1000000))
testMsg = amqp.Message(messageID, application_headers={'messID': messageID})
testMsg.properties["delivery_mode"] = 1
sendTime = time.time()
amqpChan.basic_publish(testMsg, exchange=amqpQueue, routing_key=amqpQueue)
pull_message()
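# Hedged usage illustration (host and credentials are made-up values):
#   ./check_amqp.py --host rabbit.example.com --port 5672 --queue monitoring_queue \
#                   --user guest --password guest --warning 2 --critical 4
# Exit codes mirror the logic above: 0 when the round trip is faster than the
# warning threshold, 1 when it is slower than the warning but within the
# critical threshold, 2 on timeout or error.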
| bsd-3-clause | -3,662,673,592,864,164,000 | 41.561538 | 129 | 0.739743 | false |
garhivelg/execom | src/execom/commands.py | 1 | 8056 | from app import manager, db
from faker import Factory
import random
import yaml
from .models import Protocol, Decision, Resolution
from case.models import Register, Case
@manager.command
def fill():
"Fill db with sample data"
fake = Factory.create('ru_RU')
registers = random.randint(10, 50)
for register_id in range(registers):
register = Register()
register.randomize(fake)
print("Record#%d of %d: %s" % (register_id, registers, register))
db.session.add(register)
db.session.commit()
cases = random.randint(1, 100)
for case_id in range(cases):
case = Case()
case.randomize(fake)
case.register = register
print("\tRecord#%d of %d: %s" % (register_id, registers, register))
print("\tCase#%d of %d: %s" % (case_id, cases, case))
db.session.add(case)
db.session.commit()
protocols = random.randint(1, 50)
for protocol_id in range(protocols):
protocol = Protocol()
protocol.randomize(fake)
protocol.case = case
print("\t\tRecord#%d of %d: %s" % (register_id, registers, register))
print("\t\tCase#%d of %d: %s" % (case_id, cases, case))
print("\t\tProtocol#%d of %d: %s" % (protocol_id, protocols, protocol))
db.session.add(protocol)
db.session.commit()
decisions = random.randint(1, 20)
for decision_id in range(decisions):
decision = Decision()
decision.randomize(fake)
decision.protocol = protocol
print("\t\t\tRecord#%d of %d: %s" % (register_id, registers, register))
print("\t\t\tCase#%d of %d: %s" % (case_id, cases, case))
print("\t\t\tProtocol#%d of %d: %s" % (protocol_id, protocols, protocol))
print("\t\t\tDecision#%d of %d: %s" % (decision_id, decisions, decision))
db.session.add(decision)
db.session.commit()
@manager.command
def export(output=None):
"Export data from db"
export_data = {'version': '1.0.0', }
export_data['registers'] = [{
'id': r.id,
'fund': r.fund,
'register': r.register,
} for r in Register.query.all()]
cases = Case.query.all()
export_data['cases'] = []
for case in cases:
case.normalize()
export_data['cases'].append({
'id': str(case.id),
'register': str(case.register.id),
'book': str(case.book_id),
'description': case.description,
})
protocols = Protocol.query.all()
export_data['protocols'] = []
for protocol in protocols:
protocol.normalize()
export_data['protocols'].append({
'id': protocol.id,
'protocol_id': protocol.protocol_txt,
'case': protocol.case_id,
'date': protocol.protocol_date,
'description': protocol.description,
})
decisions = Decision.query.all()
export_data['decisions'] = []
for decision in decisions:
decision.normalize()
export_data['decisions'].append({
'id': decision.id,
'protocol': decision.protocol_id,
'decision_id': decision.decision_id,
'date': decision.decision_date,
'topic': decision.topic,
'description': decision.description,
})
resolutions = Resolution.query.all()
export_data['resolutions'] = []
for resolution in resolutions:
resolution.normalize()
export_data['resolutions'].append({
'id': resolution.id,
'case': resolution.case_id,
'decision': resolution.decision_id,
'resolution_id': resolution.resolution_id,
'date': resolution.resolution_date,
'description': resolution.description,
})
print(export_data)
new_export_data = {
'version': export_data['version'],
'cases': export_data['cases'],
'protocols': export_data['protocols'],
'decisions': export_data['decisions'],
'resolutions': export_data['resolutions'],
}
print(yaml.dump(export_data, allow_unicode=True))
if output is not None:
print("Save to \"%s\"" % (output))
with open(output, "w") as outfile:
yaml.dump(export_data, outfile, default_flow_style=False, allow_unicode=True)
print("Saved to %s" % (output, ))
@manager.command
def import_yml(input=None):
"Import data from db"
if input is None:
print("No data to import")
return
else:
with open(input, 'r') as infile:
try:
print("Load from \"%s\"" % (input))
data = yaml.load(infile)
version = data.get('version')
if version == "1.0.0":
print(version)
registers = data.get('registers', [])
register_lookup = dict()
for r in registers:
fund = r.get('fund')
register = Register.query.filter_by(fund=fund, register=r.get('register')).first()
if register is None:
register = Register(fund=fund)
register.import_yml(r)
print("%s:\t%s" % (r.get('fund'), r))
register_lookup[r.get('id')] = register
db.session.add(register)
db.session.commit()
cases = data.get('cases', [])
case_lookup = dict()
for c in cases:
register = register_lookup.get(int(c.get('register')))
case = Case(register=register)
case.import_yml(c)
print("%s:\t%s" % (register, c))
case_lookup[int(c.get('id'))] = case
db.session.add(case)
db.session.commit()
protocols = data.get('protocols', [])
protocol_lookup = dict()
for p in protocols:
case = case_lookup.get(p.get('case'))
protocol = Protocol(case=case)
protocol.import_yml(p)
print("%s:\t%s" % (case, p))
protocol_lookup[int(p.get('id'))] = protocol
db.session.add(protocol)
db.session.commit()
decisions = data.get('decisions', [])
decision_lookup = dict()
for d in decisions:
protocol = protocol_lookup.get(d.get('protocol'))
decision = Decision(protocol=protocol)
decision.import_yml(d)
print("%s:\t%s" % (protocol, d))
decision_lookup[int(d.get('id'))] = decision
db.session.add(decision)
db.session.commit()
resolutions = data.get('resolutions', [])
for r in resolutions:
case = case_lookup.get(r.get('case'))
decision = decision_lookup.get(r.get('decision'))
resolution = Resolution(case=case, decision=decision)
resolution.import_yml(r)
print("%s, %s:\t%s" % (case, decision, r))
db.session.add(resolution)
db.session.commit()
print(register_lookup)
print(case_lookup)
print(protocol_lookup)
print("Loaded from \"%s\"" % (input))
except yaml.YAMLError as exc:
print(exc)
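# Illustration only: the rough shape of the YAML handled by export()/import_yml()
# above (field names follow the code; the sample values are invented).
#
#   version: 1.0.0
#   registers:
#     - {id: 1, fund: '100', register: '1'}
#   cases:
#     - {id: '1', register: '1', book: '1', description: '...'}
#   protocols:
#     - {id: 1, protocol_id: '1', case: 1, date: 1957-03-12, description: '...'}
#   decisions:
#     - {id: 1, protocol: 1, decision_id: '1', date: 1957-03-12, topic: '...', description: '...'}
#   resolutions:
#     - {id: 1, case: 1, decision: 1, resolution_id: '1', date: 1957-04-02, description: '...'}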
| gpl-3.0 | 830,730,716,799,628,300 | 37 | 106 | 0.492552 | false |
delitamakanda/jobboard | inventory/migrations/0001_initial.py | 1 | 2058 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-16 20:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=200)),
('slug', models.SlugField(max_length=200, unique=True)),
],
options={
'ordering': ('name',),
'verbose_name_plural': 'categories',
'verbose_name': 'category',
},
),
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=200)),
('slug', models.SlugField(max_length=200, unique=True)),
('image', models.ImageField(blank=True, upload_to='products/%Y/%m/%d')),
('description', models.TextField(blank=True)),
('price', models.DecimalField(decimal_places=2, max_digits=10)),
('stock', models.PositiveIntegerField()),
('available', models.BooleanField(default=True)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='products', to='inventory.Category')),
],
options={
'ordering': ('name',),
},
),
migrations.AlterIndexTogether(
name='product',
index_together=set([('id', 'slug')]),
),
]
| mit | -3,481,898,657,348,503,000 | 37.830189 | 143 | 0.541788 | false |
buzztroll/unix-agent | src/dcm/agent/cmd/scrubber.py | 3 | 18101 | import argparse
import hashlib
import os
import pwd
import random
import re
import string
import subprocess
import sys
import tarfile
import tempfile
import uuid
import Crypto.Cipher.AES as AES
import Crypto.Random as Random
import Crypto.PublicKey.RSA as RSA
import dcm.agent.config as config
opts_msg = """DCM Agent Image Preparation.
*** THIS PROGRAM WILL DELETE SYSTEM FILES! ***
This program prepares the virtual instance on which it is running for the
creation of a safe image. Depending in the options given it will delete
logs, private keys, and other files that could contain secrets or other
information that could be damaging when running a child instance.
To backup all of the information that it will remove please use the
--rescue-tar option. This will first put the file in a tarball before removing
it. This file can then be untarred to restore the removed files. However,
this file should be manually copied off the server and then safely removed
before the image creation occurs.
It is recommended that this file be encrypted using a public key on this system.
It can then be decrypted using the matching private key, which should be
safely stored in a location off this system. To encrypt the recovery tarball
use the -e option.
"""
_g_tarfile_output_message = """
******************************************************************************
The file %s contains secrets!
This file is useful for restoring any information that this program deleted,
however before creating an image from this running VM you must copy it off of
the server and then securely delete it!
******************************************************************************
"""
_g_public_key_message = """
******************************************************************************
When creating a restoring tarfile it is recommended that this file be encrypted
with a public key. This way if it is burnt into a child VM image it cannot
be seen by any parties that may boot that image in the future. To restore the
rescue file, the associated private key (which should not be on this system) can
be used.
******************************************************************************
"""
def setup_command_line_parser():
parser = argparse.ArgumentParser(description=opts_msg)
parser.add_argument("-v", "--verbose",
help="Increase the amount of output.",
action='count', default=1)
parser.add_argument("-r", "--rescue-file",
help="Create a tarball that can be used to recover the secrets that this will erase.",
default=None)
parser.add_argument("-e", "--public-key",
help="A path to the public encryption key that will be used to encrypt this file.",
default=None)
parser.add_argument("-c", "--cloud-init",
help="Delete cloud-init cache and logs.",
action="store_true")
parser.add_argument("-d", "--dhcp", help="Delete cached dhcp leases.",
action="store_true")
parser.add_argument("-H", "--history", help="Delete history files.",
action="store_true")
parser.add_argument("-k", "--private-keys",
help="Delete private key files.",
action="store_true")
parser.add_argument("-a", "--authorized-keys",
help="Delete authorized key files.",
action="store_true")
parser.add_argument("-A", "--agent",
help="Delete dcm agent files.",
action="store_true")
parser.add_argument("-t", "--agent-token",
help="Delete dcm agent token. This is recommended but off by default because the current instance will not be able to talk to DCM without it.",
action="store_true")
parser.add_argument("-X", "--agent_running",
help=argparse.SUPPRESS,
action="store_true")
parser.add_argument("-l", "--clean-logs", help="Delete system log files.",
action="store_true")
parser.add_argument("-b", "--batch",
help="Run the program without interrupting the user. This could cause their to be no rescue file.",
action="store_true")
parser.add_argument("-D", "--dry-run",
help="Run the program without actually deleting any files.",
action="store_true")
return parser
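# Hedged usage illustration (the installed command name depends on the agent's
# packaging; the option letters are the ones defined above):
#   <scrubber> -H -k -a -l -d -r /tmp/rescue.tar.gz -e ~/.ssh/id_rsa.pub
# i.e. delete history files, private keys, authorized_keys files, system logs
# and dhcp leases, first saving copies into a rescue tarball encrypted with the
# given public key.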
def console_output(opts, level, *args):
if level > opts.verbose:
return
print(*args)
def get_secure_delete():
possible_locations = ['/usr/bin/srm',
'/usr/sbin/srm',
'/usr/local/bin/srm']
srm_path = None
for p in possible_locations:
if os.path.exists(p):
srm_path = p
break
def srm_remove(opts, tar, path):
if not os.path.exists(path):
console_output(
opts, 1, "Skipping %s because it does not exist" % path)
return
if tar is not None:
tar.add(path)
if opts.dry_run:
return
console_output(opts, 2, "Securely deleting %s" % path)
rc = os.system("%s -f -z %s" % (srm_path, path))
if rc != 0:
raise Exception("Failed to remove %s" % path)
def python_rm(opts, tar, path):
if not os.path.exists(path):
console_output(
opts, 1, "Skipping %s because it does not exist" % path)
return
if tar is not None:
tar.add(path)
console_output(opts, 2, "Deleting %s" % path)
if opts.dry_run:
return
os.remove(path)
if srm_path:
return srm_remove
return python_rm
secure_delete = get_secure_delete()
def delete_history(opts, tar):
console_output(opts, 1, "Deleting all users history files...")
    lookfor = r'\..*hist.*'
all_users = pwd.getpwall()
user_homes = [user_home[5] for user_home in all_users]
for base_dir in user_homes:
for (dirpath, dirname, filenames) in os.walk(base_dir):
for f in filenames:
if re.match(lookfor, f):
secure_delete(opts, tar, os.path.join(dirpath, f))
def is_privatekey(keyfile):
with open(keyfile, 'r') as f:
if f.readline() == '-----BEGIN RSA PRIVATE KEY-----\n':
return True
return False
def delete_private_keys(opts, tar):
console_output(opts, 1, "Deleting all users private key files...")
all_users = pwd.getpwall()
user_homes = [user_home[5] for user_home in all_users]
for base_dir in user_homes:
ssh_dir = os.path.join(base_dir, '.ssh')
for (dirpath, dirname, filenames) in os.walk(ssh_dir):
for f in filenames:
filepath = os.path.join(dirpath, f)
if is_privatekey(filepath):
secure_delete(opts, tar, filepath)
def delete_authorize_keys(opts, tar):
console_output(opts, 1, "Deleting all users authorized key files...")
all_users = pwd.getpwall()
user_homes = [user_home[5] for user_home in all_users]
for base_dir in user_homes:
ssh_authorized_keys = os.path.join(base_dir, '.ssh/authorized_keys')
if os.path.exists(ssh_authorized_keys):
secure_delete(opts, tar, ssh_authorized_keys)
def delete_cloud_init_cache(opts, tar):
console_output(opts, 1, "Deleting cloud-init data files...")
cloud_init_data_path = "/var/lib/cloud/data/"
for (dirpath, dirname, filenames) in os.walk(cloud_init_data_path):
for file in filenames:
filepath = os.path.join(dirpath, file)
if is_privatekey(filepath):
secure_delete(opts, tar, filepath)
def clean_logs(opts, tar):
    lookfor_strs = [r'.*\.log', r'.*\.gz']
dir_list = ['/var/log',]
for base_dir in dir_list:
for (dirpath, dirname, filename) in os.walk(base_dir):
for f in filename:
found = False
for lookfor in lookfor_strs:
if re.match(lookfor, f):
found = True
break
if found:
filepath = os.path.join(dirpath, f)
secure_delete(opts, tar, filepath)
def clean_agent_logs(opts, tar, log_dir):
for (dirpath, dirname, filenames) in os.walk(log_dir):
for f in filenames:
abs_path = os.path.join(dirpath, f)
secure_delete(opts, tar, abs_path)
def clean_agent_files(opts, tar):
console_output(opts, 2, "Cleaning the agent files.")
files_to_clean = ['/var/lib/waagent/provisioned',
'/tmp/boot.log',
'/tmp/agent_info.tar.gz',
'/tmp/meta_info.txt',
'/tmp/process_info.txt',
'/tmp/startup_script.txt',
'/tmp/error.log',
'/tmp/installer.sh']
conf = config.AgentConfig(config.get_config_files())
log_dir = os.path.join(conf.storage_base_dir, "logs")
if not opts.agent_running:
clean_agent_logs(opts, tar, log_dir)
files_to_clean.append(conf.storage_dbfile)
for f in files_to_clean:
if os.path.exists(f):
secure_delete(opts, tar, f)
def general_cleanup(opts, tar):
console_output(opts, 1, "Performing a general cleanup...")
files_to_clean = ['/var/lib/waagent/provisioned']
for f in files_to_clean:
if os.path.exists(f):
secure_delete(opts, tar, f)
def clean_dhcp_leases(opts, tar):
    lookfor_strs = [r'.*\.lease*', r'.*\.info']
potential_paths = ['/var/lib/dhcp',
'/var/lib/dhcp3',
'/var/lib/dhclient',
'/var/lib/dhcpcd']
for p in potential_paths:
for (dirpath, dirname, filename) in os.walk(p):
for f in filename:
found = False
for lookfor in lookfor_strs:
if re.match(lookfor, f):
found = True
break
if found:
filepath = os.path.join(dirpath, f)
secure_delete(opts, tar, filepath)
def get_get_public_key_path(opts):
if opts.batch or opts.public_key is not None:
return opts.public_key
sys.stdout.write(_g_public_key_message)
sys.stdout.write("Would you like to encrypt with the public key (Y/n)? ")
sys.stdout.flush()
answer = sys.stdin.readline().strip()
if answer.lower() != 'y' and answer.lower() != "yes":
return None
key_path = os.path.expanduser("~/.ssh/id_rsa.pub")
sys.stdout.write(
"Please enter the path to the public key to use for encryption (%s): "
% key_path)
sys.stdout.flush()
answer = sys.stdin.readline().strip()
if answer:
key_path = answer
if not os.path.exists(key_path):
raise Exception("The key path %s does not exist." % key_path)
return key_path
def get_public_key_data(opts):
if opts.rescue_file is None:
# nothing to encrypt if there is no tar
return None
public_key_path = get_get_public_key_path(opts)
if not public_key_path:
return None
console_output(opts, 1, "Using the public key %s" % public_key_path)
try:
with open(public_key_path, "r") as fptr:
return fptr.readline()
except IOError:
raise Exception("The public key file %s could not be read."
% public_key_path)
def get_rescue_path(opts):
if opts.rescue_file is not None:
return os.path.abspath(opts.rescue_file)
if opts.batch:
return None
sys.stdout.write("Please enter the location of the rescue tarfile:")
sys.stdout.flush()
rescue_path = sys.stdin.readline().strip()
if not rescue_path:
return None
rescue_path = os.path.abspath(rescue_path)
return rescue_path
def get_tar(opts, rescue_path):
if rescue_path is None:
return None, None
console_output(opts, 1, "Using the rescue file %s" % rescue_path)
osf, tarfile_path = tempfile.mkstemp()
os.close(osf)
tar = tarfile.open(tarfile_path, "w:gz")
return tarfile_path, tar
def generate_symmetric_key():
symmetric_key = str(uuid.uuid4()) + ''.join(random.choice(
string.ascii_letters + string.digits + "-_!@#^(),.=+")
for _ in range(10))
return symmetric_key
def derive_key_and_iv(password, salt, key_length, iv_length):
d = d_i = b''
while len(d) < key_length + iv_length:
d_i = hashlib.md5(d_i + password + salt).digest()
d += d_i
return d[:key_length], d[key_length:key_length+iv_length]
def encrypt(in_file, out_file, password, key_length=32):
bs = AES.block_size
salted_bytes = 'Salted__'.encode()
salt = Random.new().read(bs - len(salted_bytes))
key, iv = derive_key_and_iv(password.encode(), salt, key_length, bs)
cipher = AES.new(key, AES.MODE_CBC, iv)
out_file.write(salted_bytes + salt)
finished = False
while not finished:
chunk = in_file.read(1024 * bs)
if len(chunk) == 0 or len(chunk) % bs != 0:
padding_length = (bs - len(chunk) % bs) or bs
chunk += padding_length * chr(padding_length).encode()
finished = True
out_file.write(cipher.encrypt(chunk))
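# Note, added for clarity: derive_key_and_iv() follows the OpenSSL
# EVP_BytesToKey scheme (MD5, one iteration) and encrypt() writes the
# "Salted__" header that `openssl enc` uses, so -- assuming that compatibility
# holds -- the encrypted payload should be recoverable off-box with roughly:
#   openssl enc -d -aes-256-cbc -md md5 -in data.enc -out data.tar.gz \
#       -pass 'pass:<symmetric key recovered from the rescue tarball>'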
def _write_temp_file(data):
osf, temp_file_path = tempfile.mkstemp()
try:
os.write(osf, data)
return temp_file_path
finally:
os.close(osf)
def encrypt_symmetric_key_with_public_key(symmetric_key, public_key):
rsa_pk = RSA.importKey(public_key)
pk = rsa_pk.publickey()
pem_pub = pk.exportKey(format='PEM')
public_key_file = _write_temp_file(pem_pub)
try:
openssl_binary_location = "openssl"
args = [openssl_binary_location,
'rsautl', '-encrypt', '-pubin',
'-inkey', public_key_file]
print(' '.join(args))
process = subprocess.Popen(' '.join(args),
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
(stdout, stderr) = process.communicate(symmetric_key.encode())
rc = process.wait()
if rc != 0:
raise Exception("Public key encryption failed: %s" % stderr)
return stdout
finally:
os.remove(public_key_file)
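# Counterpart operation, shown for illustration: the `key` file placed in the
# rescue tarball can be decrypted on a machine holding the matching private key
# with something like:
#   openssl rsautl -decrypt -inkey ~/.ssh/id_rsa -in key -out symmetric_key.txt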
def encrypt_with_key(tarfile_path, public_key):
if public_key is None or tarfile_path is None:
return tarfile_path, None
# first generate the symmetric key to encrypt
symmetric_key = generate_symmetric_key()
encrypted_key = encrypt_symmetric_key_with_public_key(
symmetric_key, public_key)
osf, temp_path = tempfile.mkstemp()
try:
with open(tarfile_path, "rb") as tar_fptr,\
os.fdopen(osf, "wb") as out_fptr:
encrypt(tar_fptr, out_fptr, symmetric_key)
return temp_path, encrypted_key
finally:
os.remove(tarfile_path)
def make_rescue_file(data_tar_path, rescue_file_destination_path,
encrypted_key=None, public_key=None):
# find the recovery
recovery_script_path = os.path.join(config.get_python_script_dir(),
"recovery.sh")
temp_key_path = None
temp_key_name_path = None
try:
tar = tarfile.open(rescue_file_destination_path, "w:gz")
if encrypted_key is not None:
temp_key_path = _write_temp_file(encrypted_key)
tar.add(temp_key_path, arcname='key')
if public_key is not None:
temp_key_name_path = _write_temp_file(public_key.encode())
tar.add(temp_key_name_path, arcname='public_key')
tar.add(data_tar_path, arcname='data.enc')
tar.add(recovery_script_path, arcname='recovery.sh')
tar.close()
finally:
if temp_key_path is not None:
os.remove(temp_key_path)
if temp_key_name_path is not None:
os.remove(temp_key_name_path)
os.remove(data_tar_path)
def main(args=sys.argv):
parser = setup_command_line_parser()
opts = parser.parse_args(args=args[1:])
public_key_data = get_public_key_data(opts)
rescue_path = get_rescue_path(opts)
(tarfile_path, tar) = get_tar(opts, rescue_path)
try:
if opts.history:
delete_history(opts, tar)
if opts.private_keys:
delete_private_keys(opts, tar)
if opts.authorized_keys:
delete_authorize_keys(opts, tar)
if opts.clean_logs:
clean_logs(opts, tar)
if opts.dhcp:
clean_dhcp_leases(opts, tar)
if opts.agent:
clean_agent_files(opts, tar)
if opts.agent_token:
try:
secure_delete(opts, tar, "/dcm/secure/token")
except FileNotFoundError:
console_output(opts, 1, "The token file does not exist.")
general_cleanup(opts, tar)
except BaseException as ex:
if tar is not None:
tar.close()
os.remove(tarfile_path)
console_output(opts, 0, "Error: " + str(ex))
if opts.verbose > 1:
raise
sys.exit(1)
else:
if tar is not None:
tar.close()
tarfile_path, encrypted_key =\
encrypt_with_key(tarfile_path, public_key_data)
make_rescue_file(tarfile_path, rescue_path,
encrypted_key=encrypted_key,
public_key=public_key_data)
console_output(
opts, 0,
_g_tarfile_output_message % rescue_path) | apache-2.0 | 8,373,570,479,691,780,000 | 33.878613 | 168 | 0.57041 | false |
sk7/django-guardian | guardian/migrations/0003_update_objectpermission_object_pk.py | 85 | 5664 | # encoding: utf-8
from south.v2 import DataMigration
from guardian.compat import user_model_label
class Migration(DataMigration):
def forwards(self, orm):
"""
Updates ``object_pk`` fields on both ``UserObjectPermission`` and
``GroupObjectPermission`` from ``object_id`` values.
"""
for Model in [orm.UserObjectPermission, orm.GroupObjectPermission]:
for obj in Model.objects.all():
obj.object_pk = str(obj.object_id)
obj.save()
def backwards(self, orm):
"""
Updates ``object_id`` fields on both ``UserObjectPermission`` and
``GroupObjectPermission`` from ``object_pk`` values.
"""
for Model in [orm.UserObjectPermission, orm.GroupObjectPermission]:
for obj in Model.objects.all():
obj.object_id = int(obj.object_pk)
obj.save()
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': user_model_label.split('.')[-1]},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'guardian.groupobjectpermission': {
'Meta': {'unique_together': "(['group', 'permission', 'content_type', 'object_id'],)", 'object_name': 'GroupObjectPermission'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'object_pk': ('django.db.models.fields.TextField', [], {'default': "''"}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"})
},
'guardian.userobjectpermission': {
'Meta': {'unique_together': "(['user', 'permission', 'content_type', 'object_id'],)", 'object_name': 'UserObjectPermission'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'object_pk': ('django.db.models.fields.TextField', [], {'default': "''"}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_model_label})
}
}
complete_apps = ['guardian']
| bsd-2-clause | 3,609,451,541,221,750,300 | 64.860465 | 182 | 0.560734 | false |
roadmapper/ansible | lib/ansible/modules/cloud/azure/azure_rm_resource_info.py | 28 | 17641 | #!/usr/bin/python
#
# Copyright (c) 2018 Zim Kalinowski, <zikalino@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_resource_info
version_added: "2.9"
short_description: Generic facts of Azure resources
description:
- Obtain facts of any resource using Azure REST API.
- This module gives access to resources that are not supported via Ansible modules.
- Refer to U(https://docs.microsoft.com/en-us/rest/api/) regarding details related to specific resource REST API.
options:
url:
description:
- Azure RM Resource URL.
api_version:
description:
- Specific API version to be used.
provider:
description:
            - Provider type; should be specified if no URL is given.
resource_group:
description:
- Resource group to be used.
- Required if URL is not specified.
resource_type:
description:
- Resource type.
resource_name:
description:
- Resource name.
subresource:
description:
- List of subresources.
suboptions:
namespace:
description:
- Subresource namespace.
type:
description:
- Subresource type.
name:
description:
- Subresource name.
extends_documentation_fragment:
- azure
author:
- Zim Kalinowski (@zikalino)
'''
EXAMPLES = '''
- name: Get scaleset info
azure_rm_resource_info:
resource_group: myResourceGroup
provider: compute
resource_type: virtualmachinescalesets
resource_name: myVmss
api_version: "2017-12-01"
- name: Query all the resources in the resource group
azure_rm_resource_info:
resource_group: "{{ resource_group }}"
resource_type: resources
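# Illustrative values only
- name: Get a resource directly by URL
  azure_rm_resource_info:
    url: /subscriptions/xxxx/resourceGroups/myResourceGroup/providers/Microsoft.Compute/virtualMachines/myVM
    api_version: "2017-12-01"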
'''
RETURN = '''
response:
description:
- Response specific to resource type.
returned: always
type: complex
contains:
id:
description:
- Id of the Azure resource.
type: str
returned: always
sample: "/subscriptions/xxxx...xxxx/resourceGroups/v-xisuRG/providers/Microsoft.Compute/virtualMachines/myVM"
location:
description:
- Resource location.
type: str
returned: always
sample: eastus
name:
description:
- Resource name.
type: str
returned: always
sample: myVM
properties:
description:
- Specifies the virtual machine's property.
type: complex
returned: always
contains:
diagnosticsProfile:
description:
- Specifies the boot diagnostic settings state.
type: complex
returned: always
contains:
bootDiagnostics:
description:
                            - A debugging feature which allows you to view console output and a screenshot to diagnose VM status.
type: dict
returned: always
sample: {
"enabled": true,
"storageUri": "https://vxisurgdiag.blob.core.windows.net/"
}
hardwareProfile:
description:
- Specifies the hardware settings for the virtual machine.
type: dict
returned: always
sample: {
"vmSize": "Standard_D2s_v3"
}
networkProfile:
description:
- Specifies the network interfaces of the virtual machine.
type: complex
returned: always
contains:
networkInterfaces:
description:
- Describes a network interface reference.
type: list
returned: always
sample:
- {
"id": "/subscriptions/xxxx...xxxx/resourceGroups/v-xisuRG/providers/Microsoft.Network/networkInterfaces/myvm441"
}
osProfile:
description:
- Specifies the operating system settings for the virtual machine.
type: complex
returned: always
contains:
adminUsername:
description:
- Specifies the name of the administrator account.
type: str
returned: always
sample: azureuser
allowExtensionOperations:
description:
- Specifies whether extension operations should be allowed on the virtual machine.
- This may only be set to False when no extensions are present on the virtual machine.
type: bool
returned: always
sample: true
computerName:
description:
- Specifies the host OS name of the virtual machine.
type: str
returned: always
sample: myVM
                    requireGuestProvisionSignal:
                        description:
                            - Specifies whether the host requires a guest provision signal.
type: bool
returned: always
sample: true
secrets:
description:
- Specifies set of certificates that should be installed onto the virtual machine.
type: list
returned: always
sample: []
linuxConfiguration:
description:
- Specifies the Linux operating system settings on the virtual machine.
type: dict
returned: when OS type is Linux
sample: {
"disablePasswordAuthentication": false,
"provisionVMAgent": true
}
provisioningState:
description:
- The provisioning state.
type: str
returned: always
sample: Succeeded
vmID:
description:
                            - Specifies the VM unique ID, which is a 128-bit identifier that is encoded and stored in all Azure IaaS VM SMBIOS.
- It can be read using platform BIOS commands.
type: str
returned: always
sample: "eb86d9bb-6725-4787-a487-2e497d5b340c"
storageProfile:
description:
- Specifies the storage account type for the managed disk.
type: complex
returned: always
contains:
dataDisks:
description:
- Specifies the parameters that are used to add a data disk to virtual machine.
type: list
returned: always
sample:
- {
"caching": "None",
"createOption": "Attach",
"diskSizeGB": 1023,
"lun": 2,
"managedDisk": {
"id": "/subscriptions/xxxx....xxxx/resourceGroups/V-XISURG/providers/Microsoft.Compute/disks/testdisk2",
"storageAccountType": "StandardSSD_LRS"
},
"name": "testdisk2"
}
- {
"caching": "None",
"createOption": "Attach",
"diskSizeGB": 1023,
"lun": 1,
"managedDisk": {
"id": "/subscriptions/xxxx...xxxx/resourceGroups/V-XISURG/providers/Microsoft.Compute/disks/testdisk3",
"storageAccountType": "StandardSSD_LRS"
},
"name": "testdisk3"
}
imageReference:
description:
- Specifies information about the image to use.
type: dict
returned: always
sample: {
"offer": "UbuntuServer",
"publisher": "Canonical",
"sku": "18.04-LTS",
"version": "latest"
}
osDisk:
description:
- Specifies information about the operating system disk used by the virtual machine.
type: dict
returned: always
sample: {
"caching": "ReadWrite",
"createOption": "FromImage",
"diskSizeGB": 30,
"managedDisk": {
"id": "/subscriptions/xxx...xxxx/resourceGroups/v-xisuRG/providers/Microsoft.Compute/disks/myVM_disk1_xxx",
"storageAccountType": "Premium_LRS"
},
"name": "myVM_disk1_xxx",
"osType": "Linux"
}
type:
description:
            - The resource type of the virtual machine.
type: str
returned: always
sample: "Microsoft.Compute/virtualMachines"
'''
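# A minimal usage sketch (not part of the original module documentation) showing how the
# fields documented above could be queried; the resource group and VM names are hypothetical:
#
#   - name: Get a virtual machine resource
#     azure_rm_resource_info:
#       resource_group: myResourceGroup
#       provider: compute
#       resource_type: virtualmachines
#       resource_name: myVM
#     register: vm_output
#
#   - debug:
#       var: vm_output.response[0].properties.hardwareProfile.vmSize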
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
from ansible.module_utils.azure_rm_common_rest import GenericRestClient
try:
from msrestazure.azure_exceptions import CloudError
from msrest.service_client import ServiceClient
from msrestazure.tools import resource_id, is_valid_resource_id
import json
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMResourceInfo(AzureRMModuleBase):
def __init__(self):
# define user inputs into argument
self.module_arg_spec = dict(
url=dict(
type='str'
),
provider=dict(
type='str'
),
resource_group=dict(
type='str'
),
resource_type=dict(
type='str'
),
resource_name=dict(
type='str'
),
subresource=dict(
type='list',
default=[]
),
api_version=dict(
type='str'
)
)
# store the results of the module operation
self.results = dict(
response=[]
)
self.mgmt_client = None
self.url = None
self.api_version = None
self.provider = None
self.resource_group = None
self.resource_type = None
self.resource_name = None
self.subresource = []
super(AzureRMResourceInfo, self).__init__(self.module_arg_spec, supports_tags=False)
def exec_module(self, **kwargs):
is_old_facts = self.module._name == 'azure_rm_resource_facts'
if is_old_facts:
self.module.deprecate("The 'azure_rm_resource_facts' module has been renamed to 'azure_rm_resource_info'", version='2.13')
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
self.mgmt_client = self.get_mgmt_svc_client(GenericRestClient,
base_url=self._cloud_environment.endpoints.resource_manager)
if self.url is None:
orphan = None
rargs = dict()
rargs['subscription'] = self.subscription_id
rargs['resource_group'] = self.resource_group
if not (self.provider is None or self.provider.lower().startswith('.microsoft')):
rargs['namespace'] = "Microsoft." + self.provider
else:
rargs['namespace'] = self.provider
if self.resource_type is not None and self.resource_name is not None:
rargs['type'] = self.resource_type
rargs['name'] = self.resource_name
for i in range(len(self.subresource)):
resource_ns = self.subresource[i].get('namespace', None)
resource_type = self.subresource[i].get('type', None)
resource_name = self.subresource[i].get('name', None)
if resource_type is not None and resource_name is not None:
rargs['child_namespace_' + str(i + 1)] = resource_ns
rargs['child_type_' + str(i + 1)] = resource_type
rargs['child_name_' + str(i + 1)] = resource_name
else:
orphan = resource_type
else:
orphan = self.resource_type
self.url = resource_id(**rargs)
if orphan is not None:
self.url += '/' + orphan
# if api_version was not specified, get latest one
if not self.api_version:
try:
# extract provider and resource type
if "/providers/" in self.url:
provider = self.url.split("/providers/")[1].split("/")[0]
resourceType = self.url.split(provider + "/")[1].split("/")[0]
url = "/subscriptions/" + self.subscription_id + "/providers/" + provider
api_versions = json.loads(self.mgmt_client.query(url, "GET", {'api-version': '2015-01-01'}, None, None, [200], 0, 0).text)
for rt in api_versions['resourceTypes']:
if rt['resourceType'].lower() == resourceType.lower():
self.api_version = rt['apiVersions'][0]
break
else:
# if there's no provider in API version, assume Microsoft.Resources
self.api_version = '2018-05-01'
if not self.api_version:
self.fail("Couldn't find api version for {0}/{1}".format(provider, resourceType))
except Exception as exc:
self.fail("Failed to obtain API version: {0}".format(str(exc)))
self.results['url'] = self.url
query_parameters = {}
query_parameters['api-version'] = self.api_version
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
skiptoken = None
while True:
if skiptoken:
query_parameters['skiptoken'] = skiptoken
response = self.mgmt_client.query(self.url, "GET", query_parameters, header_parameters, None, [200, 404], 0, 0)
try:
response = json.loads(response.text)
if isinstance(response, dict):
if response.get('value'):
self.results['response'] = self.results['response'] + response['value']
skiptoken = response.get('nextLink')
else:
self.results['response'] = self.results['response'] + [response]
except Exception as e:
self.fail('Failed to parse response: ' + str(e))
if not skiptoken:
break
return self.results
def main():
AzureRMResourceInfo()
if __name__ == '__main__':
main()
| gpl-3.0 | 8,602,905,250,575,678,000 | 39.930394 | 157 | 0.453319 | false |
eaudeweb/lcc-toolkit | lcc/management/commands/import_questions.py | 1 | 3129 | from openpyxl import Workbook, load_workbook
from django.core.management import call_command
from django.core.management.base import BaseCommand
from lcc.models import (
TaxonomyClassification,
Question,
Gap,
)
class Command(BaseCommand):
help = """
Import Questions and Gaps from an excel file.
File example in commands/example/questions.xlsx
"""
def add_arguments(self, parser):
parser.add_argument('file', type=str)
parser.add_argument(
"--dry-run",
action="store_true",
default=False,
help="Only parse the data, but do not insert it."
)
def parse_row(self, row):
return {
'text': row[0].value.strip(),
'level': row[1].value,
'parent_answer': row[2].value,
'classification': row[3].value,
'gap_answer': row[4].value,
'gap_classifications': str(row[5].value).split(','),
}
    def create_question(self, data, parents_by_level, dry_run=False):
if data['level'] == 0:
parent = None
else:
parent = parents_by_level[data['level'] - 1]
classification = TaxonomyClassification.objects.get(
code=data['classification']
)
        question = Question.objects.filter(
classification=classification,
parent=parent
).first()
if question:
parents_by_level[data['level']] = question
print("Question for {} already created.".format(classification))
return
print(
"Creating question for {} with parent {}".format(
classification, parent
)
)
        if not dry_run:
question = Question.objects.create(
text=data['text'],
parent=parent,
parent_answer=data['parent_answer'],
classification=classification
)
return question
def create_gap(self, data, question):
gap_classifications = []
for code in data['gap_classifications']:
classification = TaxonomyClassification.objects.get(code=code)
gap_classifications.append(classification)
gap = Gap.objects.create(on=data['gap_answer'], question=question)
for classification in gap_classifications:
gap.classifications.add(classification)
def handle(self, file, *args, **options):
wb = load_workbook(file, read_only=True)
sheet = wb.active
parents_by_level = [None] * 10
for row in sheet:
data = self.parse_row(row)
try:
                question = self.create_question(data, parents_by_level, options["dry_run"])
if not question:
continue
parents_by_level[data['level']] = question
self.create_gap(data, question)
except Exception as e:
print("Failed to create question for {} with error {}".format(
data['classification'], str(e)))
| gpl-3.0 | 4,310,738,170,072,382,500 | 32.287234 | 78 | 0.553212 | false |
chenghao/haoAdmin | models/__init__.py | 1 | 4319 | # coding:utf-8
__author__ = "gaunt"
import conf
from playhouse.pool import PooledMySQLDatabase
from peewee import Model, AutoField, BigAutoField, CharField, IntegerField, DateTimeField, SQL
database = PooledMySQLDatabase(conf.mysql_db, **conf.mysql_conn_param)
class UnknownField(object):
def __init__(self, *_, **__): pass
class BaseModel(Model):
class Meta:
database = database
class SysAuthorities(BaseModel):
authority = CharField(primary_key=True)
authority_name = CharField()
create_time = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
parent_name = CharField(null=True)
sort = IntegerField(constraints=[SQL("DEFAULT 0")])
class Meta:
table_name = 'sys_authorities'
class SysConfig(BaseModel):
config_name = CharField()
config_value = CharField()
mark = CharField(null=True)
class Meta:
table_name = 'sys_config'
class SysDictData(BaseModel):
del_state = IntegerField(constraints=[SQL("DEFAULT 0")], null=True)
name = CharField(null=True)
parent_id = IntegerField(null=True)
sort = IntegerField(null=True)
value = CharField(null=True)
class Meta:
table_name = 'sys_dict_data'
class SysMenu(BaseModel):
authority = CharField(null=True)
create_time = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
menu_icon = CharField(null=True)
menu_id = AutoField()
menu_name = CharField()
menu_url = CharField(null=True)
parent_id = IntegerField(constraints=[SQL("DEFAULT -1")])
sort_number = IntegerField(constraints=[SQL("DEFAULT 0")])
update_time = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
class Meta:
table_name = 'sys_menu'
class SysOperLog(BaseModel):
business_type = IntegerField(constraints=[SQL("DEFAULT 0")], null=True)
error_msg = CharField(constraints=[SQL("DEFAULT ''")], null=True)
id = BigAutoField()
method = CharField(constraints=[SQL("DEFAULT ''")], null=True)
oper_id = IntegerField(null=True)
oper_ip = CharField(constraints=[SQL("DEFAULT ''")], null=True)
oper_location = CharField(constraints=[SQL("DEFAULT ''")], null=True)
oper_param = CharField(constraints=[SQL("DEFAULT ''")], null=True)
oper_time = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], null=True)
oper_url = CharField(constraints=[SQL("DEFAULT ''")], null=True)
status = IntegerField(constraints=[SQL("DEFAULT 0")], null=True)
title = CharField(constraints=[SQL("DEFAULT ''")], null=True)
class Meta:
table_name = 'sys_oper_log'
class SysRole(BaseModel):
comments = CharField(null=True)
create_time = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
role_code = CharField(unique=True)
role_id = AutoField()
role_name = CharField()
update_time = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
class Meta:
table_name = 'sys_role'
class SysRoleAuthorities(BaseModel):
authority = CharField(index=True)
create_time = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], null=True)
role_id = IntegerField(index=True)
class Meta:
table_name = 'sys_role_authorities'
indexes = (
(('role_id', 'authority'), True),
)
class SysUser(BaseModel):
avatar = CharField(null=True)
create_time = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
department_id = IntegerField(null=True)
nick_name = CharField(null=True)
password = CharField()
phone = CharField(null=True)
sec_key = CharField(null=True)
sex = IntegerField(constraints=[SQL("DEFAULT 0")], null=True)
state = IntegerField(constraints=[SQL("DEFAULT 1")])
true_name = CharField(index=True, null=True)
update_time = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
user_id = AutoField()
username = CharField(unique=True)
class Meta:
table_name = 'sys_user'
class SysUserRole(BaseModel):
create_time = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
role_id = IntegerField(index=True)
user_id = IntegerField(index=True)
class Meta:
table_name = 'sys_user_role'
indexes = (
(('user_id', 'role_id'), True),
) | apache-2.0 | 3,332,453,988,538,100,000 | 30.532847 | 94 | 0.671683 | false |
Ms2ger/servo | tests/heartbeats/characterize.py | 27 | 10487 | #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import sys
import os
from os import path
import time
import datetime
import argparse
import platform
import subprocess
TOP_DIR = path.join("..", "..")
GUARD_TIME = 10
HEARTBEAT_DEFAULT_WINDOW_SIZE = 20
# Use a larger window sizes to reduce or prevent writing log files until benchmark completion
# (profiler name, window size)
# These categories need to be kept aligned with ProfilerCategory in components/profile_traits/time.rs
HEARTBEAT_PROFILER_CATEGORIES = [
("Compositing", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutPerform", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutStyleRecalc", HEARTBEAT_DEFAULT_WINDOW_SIZE),
# ("LayoutTextShaping", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutRestyleDamagePropagation", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutNonIncrementalReset", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutSelectorMatch", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutTreeBuilder", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutDamagePropagate", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutGeneratedContent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutDisplayListSorting", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutFloatPlacementSpeculation", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutMain", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutStoreOverflow", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutParallelWarmup", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("LayoutDispListBuild", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("NetHTTPRequestResponse", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("PaintingPerTile", 50),
("PaintingPrepBuff", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("Painting", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ImageDecoding", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ImageSaving", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptAttachLayout", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptConstellationMsg", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptDevtoolsMsg", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptDocumentEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptDomEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptEvaluate", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptFileRead", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptImageCacheMsg", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptInputEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptNetworkEvent", 200),
("ScriptParseHTML", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptPlannedNavigation", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptResize", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptSetScrollState", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptSetViewport", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptTimerEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptStylesheetLoad", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptUpdateReplacedElement", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptWebSocketEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptWorkerEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptServiceWorkerEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ScriptParseXML", HEARTBEAT_DEFAULT_WINDOW_SIZE),
("ApplicationHeartbeat", 100),
]
ENERGY_READER_BIN = "energymon-file-provider"
ENERGY_READER_TEMP_OUTPUT = "energymon.txt"
SUMMARY_OUTPUT = "summary.txt"
def get_command(build_target, layout_thread_count, renderer, page, profile):
"""Get the command to execute.
"""
return path.join(TOP_DIR, "target", build_target, "servo") + \
" -p %d -o output.png -y %d %s -Z profile-script-events '%s'" % \
(profile, layout_thread_count, renderer, page)
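# For illustration only: with build_target="release", layout_thread_count=4, renderer="-w",
# page="perf-rainbow.html" and profile=60, the returned command is roughly:
#   ../../target/release/servo -p 60 -o output.png -y 4 -w -Z profile-script-events 'perf-rainbow.html'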
def set_app_environment(log_dir):
"""Set environment variables to enable heartbeats.
"""
prefix = "heartbeat-"
for (profiler, window) in HEARTBEAT_PROFILER_CATEGORIES:
os.environ["SERVO_HEARTBEAT_ENABLE_" + profiler] = ""
os.environ["SERVO_HEARTBEAT_LOG_" + profiler] = path.join(log_dir, prefix + profiler + ".log")
os.environ["SERVO_HEARTBEAT_WINDOW_" + profiler] = str(window)
def start_energy_reader():
"""Energy reader writes to a file that we will poll.
"""
os.system(ENERGY_READER_BIN + " " + ENERGY_READER_TEMP_OUTPUT + "&")
def stop_energy_reader():
"""Stop the energy reader and remove its temp file.
"""
os.system("pkill -x " + ENERGY_READER_BIN)
os.remove(ENERGY_READER_TEMP_OUTPUT)
def read_energy():
"""Poll the energy reader's temp file.
"""
data = 0
with open(ENERGY_READER_TEMP_OUTPUT, "r") as em:
data = int(em.read().replace('\n', ''))
return data
def git_rev_hash():
"""Get the git revision hash.
"""
return subprocess.check_output(['git', 'rev-parse', 'HEAD']).rstrip()
def git_rev_hash_short():
"""Get the git revision short hash.
"""
return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).rstrip()
def execute(base_dir, build_target, renderer, page, profile, trial, layout_thread_count):
"""Run a single execution.
"""
log_dir = path.join(base_dir, "logs_l" + str(layout_thread_count),
"trial_" + str(trial))
if os.path.exists(log_dir):
print "Log directory already exists: " + log_dir
sys.exit(1)
os.makedirs(log_dir)
set_app_environment(log_dir)
cmd = get_command(build_target, layout_thread_count, renderer, page, profile)
# Execute
start_energy_reader()
print 'sleep ' + str(GUARD_TIME)
time.sleep(GUARD_TIME)
time_start = time.time()
energy_start = read_energy()
print cmd
os.system(cmd)
energy_end = read_energy()
time_end = time.time()
stop_energy_reader()
print 'sleep ' + str(GUARD_TIME)
time.sleep(GUARD_TIME)
uj = energy_end - energy_start
latency = time_end - time_start
watts = uj / 1000000.0 / latency
# Write a file that describes this execution
with open(path.join(log_dir, SUMMARY_OUTPUT), "w") as f:
f.write("Datetime (UTC): " + datetime.datetime.utcnow().isoformat())
f.write("\nPlatform: " + platform.platform())
f.write("\nGit hash: " + git_rev_hash())
f.write("\nGit short hash: " + git_rev_hash_short())
f.write("\nRelease: " + build_target)
f.write("\nLayout threads: " + str(layout_thread_count))
f.write("\nTrial: " + str(trial))
f.write("\nCommand: " + cmd)
f.write("\nTime (sec): " + str(latency))
f.write("\nEnergy (uJ): " + str(uj))
f.write("\nPower (W): " + str(watts))
def characterize(build_target, base_dir, (min_layout_threads, max_layout_threads), renderer, page, profile, trials):
"""Run all configurations and capture results.
"""
for layout_thread_count in xrange(min_layout_threads, max_layout_threads + 1):
for trial in xrange(1, trials + 1):
execute(base_dir, build_target, renderer, page, profile, trial, layout_thread_count)
def main():
"""For this script to be useful, the following conditions are needed:
- HEARTBEAT_PROFILER_CATEGORIES should be aligned with the profiler categories in the source code.
- The "energymon" project needs to be installed to the system (libraries and the "energymon" binary).
- The "default" energymon library will be used - make sure you choose one that is useful for your system setup
when installing energymon.
- Build servo in release mode with the "energy-profiling" feature enabled (this links with the energymon lib).
"""
# Default max number of layout threads
max_layout_threads = 1
# Default benchmark
benchmark = path.join(TOP_DIR, "tests", "html", "perf-rainbow.html")
# Default renderer
renderer = ""
# Default output directory
output_dir = "heartbeat_logs"
# Default build target
build_target = "release"
# Default profile interval
profile = 60
# Default single argument
single = False
# Default number of trials
trials = 1
# Parsing the input of the script
parser = argparse.ArgumentParser(description="Characterize Servo timing and energy behavior")
parser.add_argument("-b", "--benchmark",
default=benchmark,
help="Gets the benchmark, for example \"-b http://www.example.com\"")
parser.add_argument("-d", "--debug",
action='store_true',
help="Use debug build instead of release build")
parser.add_argument("-w", "--webrender",
action='store_true',
help="Use webrender backend")
parser.add_argument("-l", "--max_layout_threads",
help="Specify the maximum number of threads for layout, for example \"-l 5\"")
parser.add_argument("-o", "--output",
help="Specify the log output directory, for example \"-o heartbeat_logs\"")
parser.add_argument("-p", "--profile",
                        type=int,
                        default=60,
help="Profiler output interval, for example \"-p 60\"")
parser.add_argument("-s", "--single",
action='store_true',
help="Just run a single trial of the config provided, for example \"-s\"")
parser.add_argument("-t", "--trials",
default=1,
type=int,
help="Number of trials to run for each configuration, for example \"-t 1\"")
args = parser.parse_args()
if args.benchmark:
benchmark = args.benchmark
if args.debug:
build_target = "debug"
if args.webrender:
renderer = "-w"
if args.max_layout_threads:
max_layout_threads = int(args.max_layout_threads)
if args.output:
output_dir = args.output
if args.profile:
profile = args.profile
if args.single:
single = True
if args.trials:
trials = args.trials
if os.path.exists(output_dir):
print "Output directory already exists: " + output_dir
sys.exit(1)
os.makedirs(output_dir)
if single:
execute(output_dir, build_target, renderer, benchmark, profile, trials, max_layout_threads)
else:
characterize(build_target, output_dir, (1, max_layout_threads), renderer, benchmark, profile, trials)
if __name__ == "__main__":
main()
| mpl-2.0 | -4,448,387,784,574,559,000 | 39.334615 | 116 | 0.652141 | false |
canvasnetworks/canvas | website/canvas/migrations/0107_auto__del_field_userwarning_bantime__add_field_userwarning_disable_use.py | 2 | 16736 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'UserWarning.bantime'
db.delete_column('canvas_userwarning', 'bantime')
# Adding field 'UserWarning.disable_user'
db.add_column('canvas_userwarning', 'disable_user', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Adding field 'UserWarning.bantime'
db.add_column('canvas_userwarning', 'bantime', self.gf('django.db.models.fields.IntegerField')(default=0), keep_default=False)
# Deleting field 'UserWarning.disable_user'
db.delete_column('canvas_userwarning', 'disable_user')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'canvas.bestof': {
'Meta': {'object_name': 'BestOf'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'best_of'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'chosen_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'best_of'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {})
},
'canvas.category': {
'Meta': {'object_name': 'Category'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'founded': ('django.db.models.fields.FloatField', [], {'default': '1298956320'}),
'founder': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'founded_categories'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'moderated_categories'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.comment': {
'Meta': {'object_name': 'Comment'},
'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'judged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ot_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'replies'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Comment']"}),
'parent_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'replied_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'reply_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'used_in_comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'reply_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_index': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.commentflag': {
'Meta': {'object_name': 'CommentFlag'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'undone': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': "orm['auth.User']"})
},
'canvas.commentmoderationlog': {
'Meta': {'object_name': 'CommentModerationLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderated_comments_log'", 'to': "orm['auth.User']"}),
'visibility': ('django.db.models.fields.IntegerField', [], {})
},
'canvas.commentpin': {
'Meta': {'object_name': 'CommentPin'},
'auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.commentsticker': {
'Meta': {'object_name': 'CommentSticker'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stickers'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'canvas.content': {
'Meta': {'object_name': 'Content'},
'alpha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'animated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'remix_of': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'remixes'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'remix_text': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'source_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4000', 'blank': 'True'}),
'stamps_used': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'used_as_stamp'", 'blank': 'True', 'to': "orm['canvas.Content']"}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'url_mapping': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.ContentUrlMapping']", 'null': 'True', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.contenturlmapping': {
'Meta': {'object_name': 'ContentUrlMapping'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.facebookuser': {
'Meta': {'object_name': 'FacebookUser'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'fb_uid': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_invited': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'canvas.followcategory': {
'Meta': {'object_name': 'FollowCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'followers'", 'to': "orm['canvas.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'following'", 'to': "orm['auth.User']"})
},
'canvas.invitecode': {
'Meta': {'object_name': 'InviteCode'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'invited_from'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'sent_invites'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"})
},
'canvas.stashcontent': {
'Meta': {'object_name': 'StashContent'},
'content': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Content']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.userinfo': {
'Meta': {'object_name': 'UserInfo'},
'free_invites': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invite_bypass': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'last_activity_check': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'post_anonymously': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'power_level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'canvas.usermoderationlog': {
'Meta': {'object_name': 'UserModerationLog'},
'action': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderation_log'", 'to': "orm['auth.User']"})
},
'canvas.userwarning': {
'Meta': {'object_name': 'UserWarning'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'confirmed': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'custom_message': ('django.db.models.fields.TextField', [], {}),
'disable_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issued': ('canvas.util.UnixTimestampField', [], {}),
'stock_message': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_warnings'", 'to': "orm['auth.User']"}),
'viewed': ('canvas.util.UnixTimestampField', [], {'default': '0'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['canvas']
| bsd-3-clause | -7,790,802,355,295,737,000 | 76.124424 | 195 | 0.549713 | false |
szabolcsbalogh/bha | shield/shield_mqtt.py | 1 | 1622 | import tornado.log
import mosquitto
import urlparse
__author__ = 'Szabolcs Balogh'
__email__ = 'baloghsz@szabi.org'
class ShieldMQTT:
def __init__(self, url, tc):
tornado.log.app_log.info("MQTT: init")
self.tc = tc
self.url = urlparse.urlparse(url)
self.mqttc = None
self.try_to_connect()
def try_to_connect(self):
tornado.log.app_log.info("MQTT: connecting")
try:
self.mqttc = mosquitto.Mosquitto()
self.mqttc.on_message = self.on_message
self.mqttc.on_disconnect = self.on_disconnect
self.mqttc.username_pw_set(self.url.username, self.url.password)
self.mqttc.connect(self.url.hostname, self.url.port)
self.mqttc.subscribe("bha/sensors/temperature/#", 0)
        except:
            self.mqttc = None
            tornado.log.app_log.info("MQTT: not connected")
        else:
            tornado.log.app_log.info("MQTT: connected")
def get_status(self):
return self.mqttc is not None
def on_message(self, mosq, obj, msg):
if self.mqttc:
self.tc.process_external({msg.topic.split('/')[-1]: {"temperature": float(msg.payload)}})
tornado.log.app_log.info("MQTT: %s %s" % (msg.topic.split('/')[-1], str(msg.payload)))
else:
self.try_to_connect()
def on_disconnect(self, client, userdata, rc):
tornado.log.app_log.info("MQTT: disconnected")
self.mqttc = None
def poll_1s(self):
if self.mqttc:
self.mqttc.loop(timeout=0.1)
else:
self.try_to_connect()
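# A minimal usage sketch (names and URL are assumptions, not from this module):
#   mqtt = ShieldMQTT("mqtt://user:password@broker.local:1883", temperature_controller)
#   # then call mqtt.poll_1s() periodically, e.g. from a 1-second timer callback,
#   # so the client keeps processing messages and reconnects when needed.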
| gpl-2.0 | 1,361,567,007,354,177,800 | 31.44 | 101 | 0.583231 | false |
taknevski/tensorflow-xsmm | tensorflow/contrib/distributions/python/ops/beta.py | 11 | 13333 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Beta distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops import distribution
from tensorflow.contrib.distributions.python.ops import distribution_util
from tensorflow.contrib.distributions.python.ops import kullback_leibler
from tensorflow.contrib.framework.python.framework import tensor_util as contrib_tensor_util
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
__all__ = [
"Beta",
"BetaWithSoftplusConcentration",
]
_beta_sample_note = """Note: `x` must have dtype `self.dtype` and be in
`[0, 1].` It must have a shape compatible with `self.batch_shape()`."""
class Beta(distribution.Distribution):
"""Beta distribution.
The Beta distribution is defined over the `(0, 1)` interval using parameters
`concentration1` (aka "alpha") and `concentration0` (aka "beta").
#### Mathematical Details
The probability density function (pdf) is,
```none
pdf(x; alpha, beta) = x**(alpha - 1) (1 - x)**(beta - 1) / Z
Z = Gamma(alpha) Gamma(beta) / Gamma(alpha + beta)
```
where:
* `concentration1 = alpha`,
* `concentration0 = beta`,
* `Z` is the normalization constant, and,
* `Gamma` is the [gamma function](
https://en.wikipedia.org/wiki/Gamma_function).
The concentration parameters represent mean total counts of a `1` or a `0`,
i.e.,
```none
concentration1 = alpha = mean * total_concentration
concentration0 = beta = (1. - mean) * total_concentration
```
where `mean` in `(0, 1)` and `total_concentration` is a positive real number
representing a mean `total_count = concentration1 + concentration0`.
Distribution parameters are automatically broadcast in all functions; see
examples for details.
#### Examples
```python
# Create a batch of three Beta distributions.
alpha = [1, 2, 3]
beta = [1, 2, 3]
dist = Beta(alpha, beta)
dist.sample([4, 5]) # Shape [4, 5, 3]
# `x` has three batch entries, each with two samples.
x = [[.1, .4, .5],
[.2, .3, .5]]
# Calculate the probability of each pair of samples under the corresponding
# distribution in `dist`.
dist.prob(x) # Shape [2, 3]
```
```python
# Create batch_shape=[2, 3] via parameter broadcast:
alpha = [[1.], [2]] # Shape [2, 1]
beta = [3., 4, 5] # Shape [3]
dist = Beta(alpha, beta)
# alpha broadcast as: [[1., 1, 1,],
# [2, 2, 2]]
# beta broadcast as: [[3., 4, 5],
# [3, 4, 5]]
  # batch_shape [2, 3]
dist.sample([4, 5]) # Shape [4, 5, 2, 3]
x = [.2, .3, .5]
# x will be broadcast as [[.2, .3, .5],
# [.2, .3, .5]],
# thus matching batch_shape [2, 3].
dist.prob(x) # Shape [2, 3]
```
"""
def __init__(self,
concentration1=None,
concentration0=None,
validate_args=False,
allow_nan_stats=True,
name="Beta"):
"""Initialize a batch of Beta distributions.
Args:
concentration1: Positive floating-point `Tensor` indicating mean
number of successes; aka "alpha". Implies `self.dtype` and
`self.batch_shape`, i.e.,
`concentration1.shape = [N1, N2, ..., Nm] = self.batch_shape`.
concentration0: Positive floating-point `Tensor` indicating mean
number of failures; aka "beta". Otherwise has same semantics as
`concentration1`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
"""
parameters = locals()
with ops.name_scope(name, values=[concentration1, concentration0]):
self._concentration1 = self._maybe_assert_valid_concentration(
ops.convert_to_tensor(concentration1, name="concentration1"),
validate_args)
self._concentration0 = self._maybe_assert_valid_concentration(
ops.convert_to_tensor(concentration0, name="concentration0"),
validate_args)
contrib_tensor_util.assert_same_float_dtype([
self._concentration1, self._concentration0])
self._total_concentration = self._concentration1 + self._concentration0
super(Beta, self).__init__(
dtype=self._total_concentration.dtype,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
reparameterization_type=distribution.NOT_REPARAMETERIZED,
parameters=parameters,
graph_parents=[self._concentration1,
self._concentration0,
self._total_concentration],
name=name)
@staticmethod
def _param_shapes(sample_shape):
return dict(zip(
["concentration1", "concentration0"],
[ops.convert_to_tensor(sample_shape, dtype=dtypes.int32)] * 2))
@property
def concentration1(self):
"""Concentration parameter associated with a `1` outcome."""
return self._concentration1
@property
def concentration0(self):
"""Concentration parameter associated with a `0` outcome."""
return self._concentration0
@property
def total_concentration(self):
"""Sum of concentration parameters."""
return self._total_concentration
def _batch_shape_tensor(self):
return array_ops.shape(self.total_concentration)
def _batch_shape(self):
return self.total_concentration.get_shape()
def _event_shape_tensor(self):
return constant_op.constant([], dtype=dtypes.int32)
def _event_shape(self):
return tensor_shape.scalar()
def _sample_n(self, n, seed=None):
expanded_concentration1 = array_ops.ones_like(
self.total_concentration, dtype=self.dtype) * self.concentration1
expanded_concentration0 = array_ops.ones_like(
self.total_concentration, dtype=self.dtype) * self.concentration0
gamma1_sample = random_ops.random_gamma(
shape=[n],
alpha=expanded_concentration1,
dtype=self.dtype,
seed=seed)
gamma2_sample = random_ops.random_gamma(
shape=[n],
alpha=expanded_concentration0,
dtype=self.dtype,
seed=distribution_util.gen_new_seed(seed, "beta"))
beta_sample = gamma1_sample / (gamma1_sample + gamma2_sample)
return beta_sample
@distribution_util.AppendDocstring(_beta_sample_note)
def _log_prob(self, x):
return self._log_unnormalized_prob(x) - self._log_normalization()
@distribution_util.AppendDocstring(_beta_sample_note)
def _prob(self, x):
return math_ops.exp(self._log_prob(x))
@distribution_util.AppendDocstring(_beta_sample_note)
def _log_cdf(self, x):
return math_ops.log(self._cdf(x))
@distribution_util.AppendDocstring(_beta_sample_note)
def _cdf(self, x):
return math_ops.betainc(self.concentration1, self.concentration0, x)
def _log_unnormalized_prob(self, x):
x = self._maybe_assert_valid_sample(x)
return ((self.concentration1 - 1.) * math_ops.log(x)
+ (self.concentration0 - 1.) * math_ops.log1p(-x))
def _log_normalization(self):
return (math_ops.lgamma(self.concentration1)
+ math_ops.lgamma(self.concentration0)
- math_ops.lgamma(self.total_concentration))
def _entropy(self):
return (
self._log_normalization()
- (self.concentration1 - 1.) * math_ops.digamma(self.concentration1)
- (self.concentration0 - 1.) * math_ops.digamma(self.concentration0)
+ ((self.total_concentration - 2.) *
math_ops.digamma(self.total_concentration)))
def _mean(self):
return self._concentration1 / self._total_concentration
def _variance(self):
return self._mean() * (1. - self._mean()) / (1. + self.total_concentration)
@distribution_util.AppendDocstring(
"""Note: The mode is undefined when `concentration1 <= 1` or
`concentration0 <= 1`. If `self.allow_nan_stats` is `True`, `NaN`
is used for undefined modes. If `self.allow_nan_stats` is `False` an
exception is raised when one or more modes are undefined.""")
def _mode(self):
mode = (self.concentration1 - 1.) / (self.total_concentration - 2.)
if self.allow_nan_stats:
nan = array_ops.fill(
self.batch_shape_tensor(),
np.array(np.nan, dtype=self.dtype.as_numpy_dtype()),
name="nan")
is_defined = math_ops.logical_and(self.concentration1 > 1.,
self.concentration0 > 1.)
return array_ops.where(is_defined, mode, nan)
return control_flow_ops.with_dependencies([
check_ops.assert_less(
array_ops.ones([], dtype=self.dtype),
self.concentration1,
message="Mode undefined for concentration1 <= 1."),
check_ops.assert_less(
array_ops.ones([], dtype=self.dtype),
self.concentration0,
message="Mode undefined for concentration0 <= 1.")
], mode)
def _maybe_assert_valid_concentration(self, concentration, validate_args):
"""Checks the validity of a concentration parameter."""
if not validate_args:
return concentration
return control_flow_ops.with_dependencies([
check_ops.assert_positive(
concentration,
message="Concentration parameter must be positive."),
], concentration)
def _maybe_assert_valid_sample(self, x):
"""Checks the validity of a sample."""
if not self.validate_args:
return x
return control_flow_ops.with_dependencies([
check_ops.assert_positive(
x,
message="sample must be positive"),
check_ops.assert_less(
x, array_ops.ones([], self.dtype),
message="sample must be no larger than `1`."),
], x)
class BetaWithSoftplusConcentration(Beta):
"""Beta with softplus transform of `concentration1` and `concentration0`."""
def __init__(self,
concentration1,
concentration0,
validate_args=False,
allow_nan_stats=True,
name="BetaWithSoftplusConcentration"):
parameters = locals()
with ops.name_scope(name, values=[concentration1,
concentration0]) as ns:
super(BetaWithSoftplusConcentration, self).__init__(
concentration1=nn.softplus(concentration1,
name="softplus_concentration1"),
concentration0=nn.softplus(concentration0,
name="softplus_concentration0"),
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
name=ns)
self._parameters = parameters
@kullback_leibler.RegisterKL(Beta, Beta)
def _kl_beta_beta(d1, d2, name=None):
"""Calculate the batchwise KL divergence KL(d1 || d2) with d1 and d2 Beta.
Args:
d1: instance of a Beta distribution object.
d2: instance of a Beta distribution object.
name: (optional) Name to use for created operations.
default is "kl_beta_beta".
Returns:
Batchwise KL(d1 || d2)
"""
def delta(fn, is_property=True):
fn1 = getattr(d1, fn)
fn2 = getattr(d2, fn)
return (fn2 - fn1) if is_property else (fn2() - fn1())
with ops.name_scope(name, "kl_beta_beta", values=[
d1.concentration1,
d1.concentration0,
d1.total_concentration,
d2.concentration1,
d2.concentration0,
d2.total_concentration,
]):
return (delta("_log_normalization", is_property=False)
- math_ops.digamma(d1.concentration1) * delta("concentration1")
- math_ops.digamma(d1.concentration0) * delta("concentration0")
+ (math_ops.digamma(d1.total_concentration)
* delta("total_concentration")))
| apache-2.0 | 5,202,218,043,402,098,000 | 35.3297 | 92 | 0.644791 | false |
alphagov/icinga_slack_webhook | tests/test_webhook.py | 1 | 4454 | import unittest
from icinga_slack.webhook import AttachmentField, AttachmentFieldList, Attachment, AttachmentList, Message
class TestCommon(unittest.TestCase):
def setUp(self):
self.example_attachment_field = {"title": "My Title", "value": "My Value"}
self.example_attachment_field2 = {"title": "My Second Title", "value": "My Second Value", "short": True}
self.example_attachment_field_list = [self.example_attachment_field]
self.example_attachment_field_list2 = [self.example_attachment_field, self.example_attachment_field2]
self.example_attachment = {"fallback": "Fallback Message", "fields": self.example_attachment_field_list, "test": "Example Text", "pretext": "Example pretext", "color": "#FF0000"}
class TestAttachmentField(TestCommon):
def setUp(self):
self.attachment_field = AttachmentField("My Title", "My Value")
self.attachment_field_optional = AttachmentField("My Second Title", "My Second Value", True)
def test_attachment_field_required_attributes(self):
self.assertTrue('title' in self.attachment_field.keys())
self.assertTrue('value' in self.attachment_field.keys())
self.assertTrue('short' in self.attachment_field.keys())
def test_attachment_field_optional_defaults(self):
self.assertFalse(self.attachment_field['short'])
self.assertTrue(self.attachment_field_optional['short'])
class TestAttachmentFieldList(TestCommon):
def test_creating_one_field_attachment_list(self):
self.attachment_list = AttachmentFieldList(self.example_attachment_field)
self.assertEqual(len(self.attachment_list), 1)
def test_creating_two_field_attachment_list(self):
self.attachment_list = AttachmentFieldList(self.example_attachment_field, self.example_attachment_field2)
self.assertEqual(len(self.attachment_list), 2)
self.assertEqual(self.attachment_list[1]["value"], "My Second Value")
class TestAttachment(TestCommon):
def test_attachment_with_defaults(self):
self.attachment = Attachment("Fallback Message", self.example_attachment_field_list)
self.assertEqual(self.attachment['fallback'], "Fallback Message")
self.assertEqual(self.attachment['fields'], self.example_attachment_field_list )
def test_attachment_with_optionals(self):
self.attachment = Attachment("Fallback Message", self.example_attachment_field_list, [], "Text", "Pretext", "#FF0000")
self.assertEqual(self.attachment['text'], "Text")
self.assertEqual(self.attachment['pretext'], "Pretext")
self.assertEqual(self.attachment['color'], "#FF0000")
class TestAttachmentList(TestCommon):
def test_single_attachment_list(self):
self.attachment_list = AttachmentList(self.example_attachment)
self.assertEqual(len(self.attachment_list), 1)
def test_two_attachment_list(self):
self.attachment_list = AttachmentList(self.example_attachment, self.example_attachment)
self.assertEqual(len(self.attachment_list), 2)
class TestMessage(TestCommon):
def test_message_mandatory_options(self):
self.message = Message("#webops", "username", "test message")
self.assertEqual(self.message['channel'], "#webops")
self.assertEqual(self.message['text'], "test message")
self.assertEqual(self.message['username'], "username")
def test_message_attachment(self):
self.message = Message("#webops", "username", "test message")
self.message.attach("message", "hostname.domain", "CRITICAL")
self.assertEqual(len(self.message['attachments']), 1)
def test_message_attachment_expand(self):
        self.message = Message("#webops", "username", "test message")
self.message.attach(r'foo\nbar', "hostname.domain", "CRITICAL", expand=True)
self.message.attach(r'foo\nbar', "hostname.domain", "CRITICAL", expand=False)
self.assertTrue('foo\nbar' in self.message['attachments'][0]['text'])
self.assertTrue(r'foo\nbar' in self.message['attachments'][1]['text'])
def test_message_multiple_attachment(self):
self.message = Message("#webops", "username", "test message")
self.message.attach("message", "hostname.domain", "CRITICAL")
self.message.attach("message2", "hostname.domain", "CRITICAL")
self.assertEqual(len(self.message['attachments']), 2)
if __name__ == '__main__':
unittest.main()
| mit | 3,610,594,828,868,478,000 | 45.884211 | 186 | 0.695779 | false |
TimZaman/DIGITS | digits/pretrained_model/test_views.py | 2 | 3703 | # Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved.
import itertools
import json
import os
import re
import shutil
import tempfile
import time
import unittest
import urllib
import io
import tarfile
# Find the best implementation available
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from bs4 import BeautifulSoup
import flask
import mock
import PIL.Image
from urlparse import urlparse
from digits.config import config_value
from digits.pretrained_model import PretrainedModelJob
import digits.webapp
import digits.dataset.images.classification.test_views
import digits.model.images.classification.test_views
from digits import test_utils
import digits.test_views
# Must import after importing digit.config
import caffe_pb2
# May be too short on a slow system
TIMEOUT_DATASET = 45
TIMEOUT_MODEL = 60
class BaseTestUpload(digits.model.images.classification.test_views.BaseViewsTestWithModel):
"""
Tests uploading Pretrained Models
"""
def test_upload_manual(self):
        job = digits.webapp.scheduler.get_job(self.model_id)
if job is None:
raise AssertionError('Failed To Create Job')
# Write the stats of the job to json,
# and store in tempfile (for archive)
info = job.json_dict(verbose=False,epoch=-1)
task = job.train_task()
snapshot_filename = task.get_snapshot(-1)
weights_file = open(snapshot_filename, 'r')
model_def_file = open(os.path.join(job.dir(),task.model_file), 'r')
labels_file = open(os.path.join(task.dataset.dir(),info["labels file"]), 'r')
rv = self.app.post(
'/pretrained_models/new',
data = {
'weights_file': weights_file,
'model_def_file': model_def_file,
'labels_file': labels_file,
'framework': info['framework'],
'image_type': info["image dimensions"][2],
'resize_mode': info["image resize mode"],
'width': info["image dimensions"][0],
'height': info["image dimensions"][1],
'job_name': 'test_create_pretrained_model_job'
}
)
s = BeautifulSoup(rv.data, 'html.parser')
body = s.select('body')
assert rv.status_code == 302, 'POST failed with %s\n\n%s' % (rv.status_code, body)
def test_upload_archive(self):
job = digits.webapp.scheduler.get_job(self.model_id)
if job is None:
raise AssertionError('Failed To Create Job')
info = json.dumps(job.json_dict(verbose=False,epoch=-1), sort_keys=True, indent=4, separators=(',', ': '))
info_io = io.BytesIO()
info_io.write(info)
tmp = tempfile.NamedTemporaryFile()
tf = tarfile.open(fileobj=tmp, mode='w:')
for path, name in job.download_files(-1):
tf.add(path, arcname=name)
tf_info = tarfile.TarInfo("info.json")
tf_info.size = len(info_io.getvalue())
info_io.seek(0)
tf.addfile(tf_info, info_io)
tmp.flush()
tmp.seek(0)
rv = self.app.post(
'/pretrained_models/upload_archive',
data = {
'archive': tmp
}
)
s = BeautifulSoup(rv.data, 'html.parser')
body = s.select('body')
tmp.close()
assert rv.status_code == 200, 'POST failed with %s\n\n%s' % (rv.status_code, body)
class TestCaffeUpload(BaseTestUpload, test_utils.CaffeMixin):
pass
class TestTorchUpload(BaseTestUpload, test_utils.TorchMixin):
pass
| bsd-3-clause | 8,656,965,230,519,094,000 | 29.858333 | 114 | 0.623819 | false |
BhupeshGupta/frappe | frappe/utils/boilerplate.py | 3 | 6305 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, os
from frappe.utils import touch_file
def make_boilerplate(dest, app_name):
if not os.path.exists(dest):
print "Destination directory does not exist"
return
hooks = frappe._dict()
hooks.app_name = app_name
app_title = hooks.app_name.replace("_", " ").title()
	for key in ("App Title (default: {0})".format(app_title), "App Description", "App Publisher",
"App Icon (e.g. 'octicon octicon-zap')", "App Color", "App Email", "App License"):
hook_key = key.split(" (")[0].lower().replace(" ", "_")
hook_val = None
while not hook_val:
hook_val = raw_input(key + ": ")
if hook_key=="app_name" and hook_val.lower().replace(" ", "_") != hook_val:
print "App Name must be all lowercase and without spaces"
hook_val = ""
elif hook_key=="app_title" and not hook_val:
hook_val = app_title
hooks[hook_key] = hook_val
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, hooks.app_title),
with_init=True)
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "templates"), with_init=True)
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "templates",
"statics"))
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "templates",
"pages"), with_init=True)
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "templates",
"generators"), with_init=True)
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "config"), with_init=True)
touch_file(os.path.join(dest, hooks.app_name, hooks.app_name, "__init__.py"))
with open(os.path.join(dest, hooks.app_name, "MANIFEST.in"), "w") as f:
f.write(manifest_template.format(**hooks))
with open(os.path.join(dest, hooks.app_name, ".gitignore"), "w") as f:
f.write(gitignore_template)
with open(os.path.join(dest, hooks.app_name, "setup.py"), "w") as f:
f.write(setup_template.format(**hooks))
with open(os.path.join(dest, hooks.app_name, "requirements.txt"), "w") as f:
f.write("frappe")
touch_file(os.path.join(dest, hooks.app_name, "README.md"))
with open(os.path.join(dest, hooks.app_name, "license.txt"), "w") as f:
f.write("License: " + hooks.app_license)
with open(os.path.join(dest, hooks.app_name, hooks.app_name, "modules.txt"), "w") as f:
f.write(hooks.app_title)
with open(os.path.join(dest, hooks.app_name, hooks.app_name, "hooks.py"), "w") as f:
f.write(hooks_template.format(**hooks))
touch_file(os.path.join(dest, hooks.app_name, hooks.app_name, "patches.txt"))
with open(os.path.join(dest, hooks.app_name, hooks.app_name, "config", "desktop.py"), "w") as f:
f.write(desktop_template.format(**hooks))
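# Illustrative sketch (not part of the original module): calling
# make_boilerplate("apps", "library_management") and answering the prompts above
# produces roughly the following skeleton (names depend on the answers given):
#
#   apps/library_management/
#       MANIFEST.in  .gitignore  setup.py  requirements.txt  README.md  license.txt
#       library_management/
#           __init__.py  hooks.py  modules.txt  patches.txt
#           config/desktop.py
#           templates/pages/  templates/generators/  templates/statics/
#           <App Title>/        # module folder named after the "App Title" answer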
manifest_template = """include MANIFEST.in
include requirements.txt
include *.json
include *.md
include *.py
include *.txt
recursive-include {app_name} *.css
recursive-include {app_name} *.csv
recursive-include {app_name} *.html
recursive-include {app_name} *.ico
recursive-include {app_name} *.js
recursive-include {app_name} *.json
recursive-include {app_name} *.md
recursive-include {app_name} *.png
recursive-include {app_name} *.py
recursive-include {app_name} *.svg
recursive-include {app_name} *.txt
recursive-exclude {app_name} *.pyc"""
hooks_template = """app_name = "{app_name}"
app_title = "{app_title}"
app_publisher = "{app_publisher}"
app_description = "{app_description}"
app_icon = "{app_icon}"
app_color = "{app_color}"
app_email = "{app_email}"
app_version = "0.0.1"
# Includes in <head>
# ------------------
# include js, css files in header of desk.html
# app_include_css = "/assets/{app_name}/css/{app_name}.css"
# app_include_js = "/assets/{app_name}/js/{app_name}.js"
# include js, css files in header of web template
# web_include_css = "/assets/{app_name}/css/{app_name}.css"
# web_include_js = "/assets/{app_name}/js/{app_name}.js"
# Home Pages
# ----------
# application home page (will override Website Settings)
# home_page = "login"
# website user home page (by Role)
# role_home_page = {{
# "Role": "home_page"
# }}
# Generators
# ----------
# automatically create page for each record of this doctype
# website_generators = ["Web Page"]
# Installation
# ------------
# before_install = "{app_name}.install.before_install"
# after_install = "{app_name}.install.after_install"
# Desk Notifications
# ------------------
# See frappe.core.notifications.get_notification_config
# notification_config = "{app_name}.notifications.get_notification_config"
# Permissions
# -----------
# Permissions evaluated in scripted ways
# permission_query_conditions = {{
# "Event": "frappe.core.doctype.event.event.get_permission_query_conditions",
# }}
#
# has_permission = {{
# "Event": "frappe.core.doctype.event.event.has_permission",
# }}
# Document Events
# ---------------
# Hook on document methods and events
# doc_events = {{
# "*": {{
# "on_update": "method",
# "on_cancel": "method",
# "on_trash": "method"
# }}
# }}
# Scheduled Tasks
# ---------------
# scheduler_events = {{
# "all": [
# "{app_name}.tasks.all"
# ],
# "daily": [
# "{app_name}.tasks.daily"
# ],
# "hourly": [
# "{app_name}.tasks.hourly"
# ],
# "weekly": [
# "{app_name}.tasks.weekly"
# ]
# "monthly": [
# "{app_name}.tasks.monthly"
# ]
# }}
# Testing
# -------
# before_tests = "{app_name}.install.before_tests"
# Overriding Whitelisted Methods
# ------------------------------
#
# override_whitelisted_methods = {{
# "frappe.core.doctype.event.event.get_events": "{app_name}.event.get_events"
# }}
"""
desktop_template = """from frappe import _
def get_data():
return {{
"{app_title}": {{
"color": "{app_color}",
"icon": "{app_icon}",
"type": "module",
"label": _("{app_title}")
}}
}}
"""
setup_template = """from setuptools import setup, find_packages
import os
version = '0.0.1'
setup(
name='{app_name}',
version=version,
description='{app_description}',
author='{app_publisher}',
author_email='{app_email}',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=("frappe",),
)
"""
gitignore_template = """.DS_Store
*.pyc
*.egg-info
*.swp
tags"""
| mit | 3,771,579,613,394,773,000 | 25.603376 | 102 | 0.647898 | false |
dwfreed/mitmproxy | test/mitmproxy/addons/test_serverplayback.py | 1 | 9861 | import os
import urllib
from mitmproxy.test import tutils
from mitmproxy.test import tflow
from mitmproxy.test import taddons
import mitmproxy.test.tutils
from mitmproxy.addons import serverplayback
from mitmproxy import options
from mitmproxy import exceptions
from mitmproxy import io
def tdump(path, flows):
w = io.FlowWriter(open(path, "wb"))
for i in flows:
w.add(i)
def test_config():
s = serverplayback.ServerPlayback()
with tutils.tmpdir() as p:
with taddons.context() as tctx:
fpath = os.path.join(p, "flows")
tdump(fpath, [tflow.tflow(resp=True)])
tctx.configure(s, server_replay=[fpath])
tutils.raises(
exceptions.OptionsError,
tctx.configure,
s,
server_replay=[p]
)
def test_tick():
s = serverplayback.ServerPlayback()
with taddons.context() as tctx:
s.stop = True
s.final_flow = tflow.tflow()
s.final_flow.live = False
s.tick()
assert tctx.master.should_exit.is_set()
def test_server_playback():
sp = serverplayback.ServerPlayback()
sp.configure(options.Options(), [])
f = tflow.tflow(resp=True)
assert not sp.flowmap
sp.load([f])
assert sp.flowmap
assert sp.next_flow(f)
assert not sp.flowmap
sp.load([f])
assert sp.flowmap
sp.clear()
assert not sp.flowmap
def test_ignore_host():
sp = serverplayback.ServerPlayback()
sp.configure(options.Options(server_replay_ignore_host=True), [])
r = tflow.tflow(resp=True)
r2 = tflow.tflow(resp=True)
r.request.host = "address"
r2.request.host = "address"
assert sp._hash(r) == sp._hash(r2)
r2.request.host = "wrong_address"
assert sp._hash(r) == sp._hash(r2)
def test_ignore_content():
s = serverplayback.ServerPlayback()
s.configure(options.Options(server_replay_ignore_content=False), [])
r = tflow.tflow(resp=True)
r2 = tflow.tflow(resp=True)
r.request.content = b"foo"
r2.request.content = b"foo"
assert s._hash(r) == s._hash(r2)
r2.request.content = b"bar"
assert not s._hash(r) == s._hash(r2)
s.configure(options.Options(server_replay_ignore_content=True), [])
r = tflow.tflow(resp=True)
r2 = tflow.tflow(resp=True)
r.request.content = b"foo"
r2.request.content = b"foo"
assert s._hash(r) == s._hash(r2)
r2.request.content = b"bar"
assert s._hash(r) == s._hash(r2)
r2.request.content = b""
assert s._hash(r) == s._hash(r2)
r2.request.content = None
assert s._hash(r) == s._hash(r2)
def test_ignore_content_wins_over_params():
s = serverplayback.ServerPlayback()
s.configure(
options.Options(
server_replay_ignore_content=True,
server_replay_ignore_payload_params=[
"param1", "param2"
]
),
[]
)
# NOTE: parameters are mutually exclusive in options
r = tflow.tflow(resp=True)
r.request.headers["Content-Type"] = "application/x-www-form-urlencoded"
r.request.content = b"paramx=y"
r2 = tflow.tflow(resp=True)
r2.request.headers["Content-Type"] = "application/x-www-form-urlencoded"
r2.request.content = b"paramx=x"
# same parameters
assert s._hash(r) == s._hash(r2)
def test_ignore_payload_params_other_content_type():
s = serverplayback.ServerPlayback()
with taddons.context() as tctx:
tctx.configure(
s,
server_replay_ignore_content=False,
server_replay_ignore_payload_params=[
"param1", "param2"
]
)
r = tflow.tflow(resp=True)
r.request.headers["Content-Type"] = "application/json"
r.request.content = b'{"param1":"1"}'
r2 = tflow.tflow(resp=True)
r2.request.headers["Content-Type"] = "application/json"
r2.request.content = b'{"param1":"1"}'
# same content
assert s._hash(r) == s._hash(r2)
    # distinct content (note only x-www-form-urlencoded payload is analysed)
r2.request.content = b'{"param1":"2"}'
assert not s._hash(r) == s._hash(r2)
def test_hash():
s = serverplayback.ServerPlayback()
s.configure(options.Options(), [])
r = tflow.tflow()
r2 = tflow.tflow()
assert s._hash(r)
assert s._hash(r) == s._hash(r2)
r.request.headers["foo"] = "bar"
assert s._hash(r) == s._hash(r2)
r.request.path = "voing"
assert s._hash(r) != s._hash(r2)
r.request.path = "path?blank_value"
r2.request.path = "path?"
assert s._hash(r) != s._hash(r2)
def test_headers():
s = serverplayback.ServerPlayback()
s.configure(options.Options(server_replay_use_headers=["foo"]), [])
r = tflow.tflow(resp=True)
r.request.headers["foo"] = "bar"
r2 = tflow.tflow(resp=True)
assert not s._hash(r) == s._hash(r2)
r2.request.headers["foo"] = "bar"
assert s._hash(r) == s._hash(r2)
r2.request.headers["oink"] = "bar"
assert s._hash(r) == s._hash(r2)
r = tflow.tflow(resp=True)
r2 = tflow.tflow(resp=True)
assert s._hash(r) == s._hash(r2)
def test_load():
s = serverplayback.ServerPlayback()
s.configure(options.Options(), [])
r = tflow.tflow(resp=True)
r.request.headers["key"] = "one"
r2 = tflow.tflow(resp=True)
r2.request.headers["key"] = "two"
s.load([r, r2])
assert s.count() == 2
n = s.next_flow(r)
assert n.request.headers["key"] == "one"
assert s.count() == 1
n = s.next_flow(r)
assert n.request.headers["key"] == "two"
assert not s.flowmap
assert s.count() == 0
assert not s.next_flow(r)
def test_load_with_server_replay_nopop():
s = serverplayback.ServerPlayback()
s.configure(options.Options(server_replay_nopop=True), [])
r = tflow.tflow(resp=True)
r.request.headers["key"] = "one"
r2 = tflow.tflow(resp=True)
r2.request.headers["key"] = "two"
s.load([r, r2])
assert s.count() == 2
s.next_flow(r)
assert s.count() == 2
def test_ignore_params():
s = serverplayback.ServerPlayback()
s.configure(
options.Options(
server_replay_ignore_params=["param1", "param2"]
),
[]
)
r = tflow.tflow(resp=True)
r.request.path = "/test?param1=1"
r2 = tflow.tflow(resp=True)
r2.request.path = "/test"
assert s._hash(r) == s._hash(r2)
r2.request.path = "/test?param1=2"
assert s._hash(r) == s._hash(r2)
r2.request.path = "/test?param2=1"
assert s._hash(r) == s._hash(r2)
r2.request.path = "/test?param3=2"
assert not s._hash(r) == s._hash(r2)
def thash(r, r2, setter):
s = serverplayback.ServerPlayback()
s.configure(
options.Options(
server_replay_ignore_payload_params=["param1", "param2"]
),
[]
)
setter(r, paramx="x", param1="1")
setter(r2, paramx="x", param1="1")
# same parameters
assert s._hash(r) == s._hash(r2)
# ignored parameters !=
setter(r2, paramx="x", param1="2")
assert s._hash(r) == s._hash(r2)
# missing parameter
setter(r2, paramx="x")
assert s._hash(r) == s._hash(r2)
# ignorable parameter added
setter(r2, paramx="x", param1="2")
assert s._hash(r) == s._hash(r2)
# not ignorable parameter changed
setter(r2, paramx="y", param1="1")
assert not s._hash(r) == s._hash(r2)
# not ignorable parameter missing
setter(r2, param1="1")
r2.request.content = b"param1=1"
assert not s._hash(r) == s._hash(r2)
def test_ignore_payload_params():
def urlencode_setter(r, **kwargs):
r.request.content = urllib.parse.urlencode(kwargs).encode()
r = tflow.tflow(resp=True)
r.request.headers["Content-Type"] = "application/x-www-form-urlencoded"
r2 = tflow.tflow(resp=True)
r2.request.headers["Content-Type"] = "application/x-www-form-urlencoded"
thash(r, r2, urlencode_setter)
boundary = 'somefancyboundary'
def multipart_setter(r, **kwargs):
b = "--{0}\n".format(boundary)
parts = []
for k, v in kwargs.items():
parts.append(
"Content-Disposition: form-data; name=\"%s\"\n\n"
"%s\n" % (k, v)
)
c = b + b.join(parts) + b
r.request.content = c.encode()
r.request.headers["content-type"] = 'multipart/form-data; boundary=' +\
boundary
r = tflow.tflow(resp=True)
r2 = tflow.tflow(resp=True)
thash(r, r2, multipart_setter)
def test_server_playback_full():
s = serverplayback.ServerPlayback()
with taddons.context() as tctx:
tctx.configure(
s,
refresh_server_playback = True,
keepserving=False
)
f = tflow.tflow()
f.response = mitmproxy.test.tutils.tresp(content=f.request.content)
s.load([f, f])
tf = tflow.tflow()
assert not tf.response
s.request(tf)
assert tf.response == f.response
tf = tflow.tflow()
tf.request.content = b"gibble"
assert not tf.response
s.request(tf)
assert not tf.response
assert not s.stop
s.tick()
assert not s.stop
tf = tflow.tflow()
s.request(tflow.tflow())
assert s.stop
def test_server_playback_kill():
s = serverplayback.ServerPlayback()
with taddons.context() as tctx:
tctx.configure(
s,
refresh_server_playback = True,
replay_kill_extra=True
)
f = tflow.tflow()
f.response = mitmproxy.test.tutils.tresp(content=f.request.content)
s.load([f])
f = tflow.tflow()
f.request.host = "nonexistent"
tctx.cycle(s, f)
assert f.reply.value == exceptions.Kill
| mit | -5,098,919,676,358,967,000 | 25.86921 | 79 | 0.588683 | false |
strands-project/robomongo | src/third-party/mongodb/buildscripts/errorcodes.py | 12 | 5140 | #!/usr/bin/env python
import os
import sys
import re
import utils
assertNames = [ "uassert" , "massert", "fassert", "fassertFailed" ]
def assignErrorCodes():
cur = 10000
for root in assertNames:
for x in utils.getAllSourceFiles():
print( x )
didAnything = False
fixed = ""
for line in open( x ):
s = line.partition( root + "(" )
if s[1] == "" or line.startswith( "#define " + root):
fixed += line
continue
fixed += s[0] + root + "( " + str( cur ) + " , " + s[2]
cur = cur + 1
didAnything = True
if didAnything:
out = open( x , 'w' )
out.write( fixed )
out.close()
codes = []
def readErrorCodes( callback, replaceZero = False ):
quick = [ "assert" , "Exception"]
ps = [ re.compile( "(([umsgf]asser(t|ted))) *\(( *)(\d+)" ) ,
re.compile( "((User|Msg|MsgAssertion)Exceptio(n))\(( *)(\d+)" ),
re.compile( "((fassertFailed)()) *\(( *)(\d+)" )
]
bad = [ re.compile( "\sassert *\(" ) ]
for x in utils.getAllSourceFiles():
needReplace = [False]
lines = []
lastCodes = [0]
lineNum = 1
for line in open( x ):
found = False
for zz in quick:
if line.find( zz ) >= 0:
found = True
break
if found:
if x.find( "src/mongo/" ) >= 0:
for b in bad:
if len(b.findall( line )) > 0:
print( x )
print( line )
raise Exception( "you can't use a bare assert" )
for p in ps:
def repl( m ):
m = m.groups()
start = m[0]
spaces = m[3]
code = m[4]
if code == '0' and replaceZero :
code = getNextCode( lastCodes )
lastCodes.append( code )
code = str( code )
needReplace[0] = True
print( "Adding code " + code + " to line " + x + ":" + str( lineNum ) )
else :
codes.append( ( x , lineNum , line , code ) )
callback( x , lineNum , line , code )
return start + "(" + spaces + code
line = re.sub( p, repl, line )
# end if ps loop
if replaceZero : lines.append( line )
lineNum = lineNum + 1
if replaceZero and needReplace[0] :
print( "Replacing file " + x )
of = open( x + ".tmp", 'w' )
of.write( "".join( lines ) )
of.close()
os.remove(x)
os.rename( x + ".tmp", x )
def getNextCode( lastCodes = [0] ):
highest = [max(lastCodes)]
def check( fileName , lineNum , line , code ):
code = int( code )
if code > highest[0]:
highest[0] = code
readErrorCodes( check )
return highest[0] + 1
def checkErrorCodes():
seen = {}
errors = []
def checkDups( fileName , lineNum , line , code ):
if code in seen:
print( "DUPLICATE IDS" )
print( "%s:%d:%s %s" % ( fileName , lineNum , line.strip() , code ) )
print( "%s:%d:%s %s" % seen[code] )
errors.append( seen[code] )
seen[code] = ( fileName , lineNum , line , code )
readErrorCodes( checkDups, True )
return len( errors ) == 0
def getBestMessage( err , start ):
err = err.partition( start )[2]
if not err:
return ""
err = err.partition( "\"" )[2]
if not err:
return ""
err = err.rpartition( "\"" )[0]
if not err:
return ""
return err
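# Illustrative example (not part of the original script): for
#   line = 'uassert( 10001, "invalid parameter", x > 0 );'
# getBestMessage( line, "10001" ) drops everything up to the code and returns the
# text between the first and the last double quote, i.e. 'invalid parameter'.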
def genErrorOutput():
if os.path.exists( "docs/errors.md" ):
i = open( "docs/errors.md" , "r" )
out = open( "docs/errors.md" , 'wb' )
out.write( "MongoDB Error Codes\n==========\n\n\n" )
prev = ""
seen = {}
codes.sort( key=lambda x: x[0]+"-"+x[3] )
for f,l,line,num in codes:
if num in seen:
continue
seen[num] = True
if f.startswith( "./" ):
f = f[2:]
if f != prev:
out.write( "\n\n" )
out.write( f + "\n----\n" )
prev = f
url = "http://github.com/mongodb/mongo/blob/master/" + f + "#L" + str(l)
out.write( "* " + str(num) + " [code](" + url + ") " + getBestMessage( line , str(num) ) + "\n" )
out.write( "\n" )
out.close()
if __name__ == "__main__":
ok = checkErrorCodes()
print( "ok:" + str( ok ) )
print( "next: " + str( getNextCode() ) )
if ok:
genErrorOutput()
| gpl-3.0 | -4,708,335,960,456,195,000 | 27.715084 | 105 | 0.414008 | false |
KarolBedkowski/photocat | photocat/stats/basic.py | 1 | 2504 | # -*- coding: utf-8 -*-
"""
Photo Catalog v 1.0 (photocat)
Copyright (c) Karol Będkowski, 2004-2010
This file is part of Photo Catalog
"""
__author__ = 'Karol Będkowski'
__copyright__ = 'Copyright (c) Karol Będkowski, 2006-2010'
__version__ = "2010-06-11"
import os.path
from photocat.model import Directory, Collection
from ._stats_provider import StatsProvider
class BasicStats(StatsProvider):
name = _('Basic')
def __init__(self):
StatsProvider.__init__(self)
self._disks = 0
self._dirs = 0
self._files = 0
self._dirs_size = {}
self._dirs_image_cnt = {}
self._disk_image_cnt = {}
self._file_types = {}
def _compute_stats(self, objects):
self._get_items(objects)
yield _('Basic'), [((0, _('Disks')), self._disks, None),
((1, _('Directories')), self._dirs, None),
((2, _('Files')), self._files, None),
]
yield _("Dirs by size"), _compute_stats(self._dirs_size)
yield _("Dirs by images count"), _compute_stats(self._dirs_image_cnt)
yield _("Disks by images count"), _compute_stats(self._disk_image_cnt)
yield _("File formats"), _compute_stats(self._file_types)
def _get_items(self, objects):
for obj in objects:
if isinstance(obj, Collection):
				self._find_items_in_collection(obj)
elif isinstance(obj, Directory):
self._find_items_in_dir(obj)
	def _find_items_in_collection(self, collection):
self._disks += len(collection.disks)
for disk in collection.disks:
if self._progress_cb and self._progress_cb():
self._progress_cb()(disk.name)
self._find_items_in_dir(disk)
def _find_items_in_dir(self, directory):
self._dirs += len(directory.subdirs)
for subdir in directory.subdirs:
self._find_items_in_dir(subdir)
disk_name = directory.disk.name
dir_path = disk_name + ':/' + directory.path
if directory.files:
self._files += len(directory.files)
self._dirs_size[dir_path] = directory.directory_size_sumary
self._dirs_image_cnt[dir_path] = len(directory.files)
self._disk_image_cnt[disk_name] = self._disk_image_cnt.get(
disk_name, 0) + len(directory.files)
for img in directory.files:
ext = (('.' in img.name) and \
os.path.splitext(img.name)[-1].lower()[1:]) or ''
self._file_types[ext] = self._file_types.get(ext, 0) + 1
def _compute_stats(data):
all_cnt = float(sum(data.itervalues()))
cnts = sorted((cnt, key) for key, cnt in data.iteritems())
return [((idx, key), cnt, cnt / all_cnt) for idx, (cnt, key)
in enumerate(cnts)]
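# Illustrative example (not part of the original module):
#   _compute_stats({'jpg': 3, 'png': 1})
# sums the counts (4.0), sorts ascending by count and returns
#   [((0, 'png'), 1, 0.25), ((1, 'jpg'), 3, 0.75)]
# i.e. ((index, key), count, fraction_of_total) rows for the statistics listed above.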
# vim: encoding=utf8: ff=unix:
| gpl-2.0 | -8,680,978,148,205,904,000 | 28.423529 | 72 | 0.654538 | false |
Distrotech/scons | test/Java/nested-classes.py | 4 | 2835 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test Java compilation with inner and anonymous classes (Issue 2087).
"""
import os
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
where_javac, java_version = test.java_where_javac()
# Work around javac 1.4 not reporting its version:
java_version = java_version or "1.4"
test.write('SConstruct', """
env = Environment()
env['JAVAVERSION'] = '%(java_version)s'
classes = env.Java(target = 'build', source = 'source')
env.Jar(target = 'anon.jar', source = classes)
""" % locals())
test.subdir('source', 'build')
test.write(['source', 'Test.java'], """\
public class Test {
class Inner { };
public void testAnon(Test test) { }
public void testAnon(Inner inner) { }
public Test ( ) {
class Foo {
public int reply ( ) {
class Bar { };
return 1 ;
}
} ;
testAnon(new Test() { });
}
public Test (int a) {
class Foo {
public int reply ( ) {
class Bar { };
return 1 ;
}
} ;
testAnon(new Test() { });
}
public Test (int a, int b) {
class Foobar {
public int reply ( ) {
class Bar { };
return 1 ;
}
} ;
testAnon(new Test() { });
}
public Test (int a, int b, int c) {
testAnon(new Test() { });
}
void run() {
testAnon(new Inner() {
public void execute() {
testAnon(new Inner( ) {
public void execute() {
System.out.println("Inside execute()");
}
});
}
});
}
}
""")
test.run(arguments = '.')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit | -1,148,696,750,213,192,600 | 25.009174 | 73 | 0.638801 | false |
CZ-NIC/dionaea | modules/python/util/readlogsqltree.py | 3 | 18542 | #!/opt/dionaea/bin/python3.2
from __future__ import print_function
from optparse import OptionParser
import sqlite3
import json
def resolve_result(resultcursor):
names = [resultcursor.description[x][0]
for x in range(len(resultcursor.description))]
resolvedresult = [ dict(zip(names, i)) for i in resultcursor]
return resolvedresult
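# Illustrative example (not part of the original script): for a query such as
#   r = cursor.execute("SELECT connection, remote_host FROM connections")
# resolve_result(r) turns every row tuple into a dict keyed by column name, e.g.
#   [{'connection': 1, 'remote_host': '10.0.0.5'}, ...]
# which is the shape all of the print_* helpers below rely on.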
def print_offers(cursor, connection, indent):
r = cursor.execute(
"SELECT * from offers WHERE connection = ?", (connection, ))
offers = resolve_result(r)
for offer in offers:
print("{:s} offer: {:s}".format(' ' * indent, offer['offer_url']))
def print_downloads(cursor, connection, indent):
r = cursor.execute(
"SELECT * from downloads WHERE connection = ?", (connection, ))
downloads = resolve_result(r)
for download in downloads:
print("{:s} download: {:s} {:s}".format(
' ' * indent, download['download_md5_hash'],
download['download_url']))
print_virustotals(cursor, download['download_md5_hash'], indent + 2 )
def print_virustotals(cursor, md5_hash, indent):
r = cursor.execute("""SELECT datetime(virustotal_timestamp, 'unixepoch', 'localtime') as timestamp, virustotal_permalink, COUNT(*) AS scanners,
(
SELECT COUNT(virustotalscan)
FROM virustotals
NATURAL JOIN virustotalscans
WHERE virustotal_md5_hash = ?
AND virustotalscan_result IS NOT NULL ) AS detected
FROM virustotals NATURAL JOIN virustotalscans WHERE virustotal_md5_hash = ?""", (md5_hash, md5_hash))
virustotals = resolve_result(r)
for vt in virustotals:
if vt['timestamp'] is None:
continue
print("{:s} virustotal {} {}/{} ({:.0f}%) {}".format(' ' * indent, vt['timestamp'], vt[
'detected'], vt['scanners'], vt['detected']/vt['scanners']*100, vt['virustotal_permalink']))
r = cursor.execute(
"SELECT DISTINCT virustotalscan_result from virustotals NATURAL JOIN virustotalscans WHERE virustotal_md5_hash = ? AND virustotalscan_result IS NOT NULL", (md5_hash, ))
virustotals = resolve_result(r)
print("{:s} names ".format(' ' * (indent+2)), end='')
for vt in virustotals:
print("'{}' ".format(vt['virustotalscan_result']), end='')
print("")
def print_profiles(cursor, connection, indent):
r = cursor.execute(
"SELECT * from emu_profiles WHERE connection = ?", (connection, ))
profiles = resolve_result(r)
for profile in profiles:
print("{:s} profile: {:s}".format(
' ' * indent, json.loads(profile['emu_profile_json'])))
def print_services(cursor, connection, indent):
r = cursor.execute(
"SELECT * from emu_services WHERE connection = ?", (connection, ))
services = resolve_result(r)
for service in services:
print("{:s} service: {:s}".format(
' ' * indent, service['emu_service_url']))
def print_p0fs(cursor, connection, indent):
r = cursor.execute(
"SELECT * from p0fs WHERE connection = ?", (connection, ))
p0fs = resolve_result(r)
for p0f in p0fs:
print("{:s} p0f: genre:'{}' detail:'{}' uptime:'{}' tos:'{}' dist:'{}' nat:'{}' fw:'{}'".format(
' ' * indent, p0f['p0f_genre'], p0f['p0f_detail'],
p0f['p0f_uptime'], p0f['p0f_tos'], p0f[
'p0f_dist'], p0f['p0f_nat'],
p0f['p0f_fw']))
def print_dcerpcbinds(cursor, connection, indent):
r = cursor.execute("""
SELECT DISTINCT
dcerpcbind_uuid,
dcerpcservice_name,
dcerpcbind_transfersyntax
FROM
dcerpcbinds
LEFT OUTER JOIN dcerpcservices ON (dcerpcbind_uuid = dcerpcservice_uuid)
WHERE
connection = ?""", (connection, ))
dcerpcbinds = resolve_result(r)
for dcerpcbind in dcerpcbinds:
print("{:s} dcerpc bind: uuid '{:s}' ({:s}) transfersyntax {:s}".format(
' ' * indent,
dcerpcbind['dcerpcbind_uuid'],
dcerpcbind['dcerpcservice_name'],
dcerpcbind['dcerpcbind_transfersyntax']) )
def print_dcerpcrequests(cursor, connection, indent):
r = cursor.execute("""
SELECT
dcerpcrequest_uuid,
dcerpcservice_name,
dcerpcrequest_opnum,
dcerpcserviceop_name,
dcerpcserviceop_vuln
FROM
dcerpcrequests
LEFT OUTER JOIN dcerpcservices ON (dcerpcrequest_uuid = dcerpcservice_uuid)
LEFT OUTER JOIN dcerpcserviceops ON (dcerpcservices.dcerpcservice = dcerpcserviceops.dcerpcservice AND dcerpcrequest_opnum = dcerpcserviceop_opnum)
WHERE
connection = ?""", (connection, ))
dcerpcrequests = resolve_result(r)
for dcerpcrequest in dcerpcrequests:
print("{:s} dcerpc request: uuid '{:s}' ({:s}) opnum {:d} ({:s} ({:s}))".format(
' ' * indent,
dcerpcrequest['dcerpcrequest_uuid'],
dcerpcrequest['dcerpcservice_name'],
dcerpcrequest['dcerpcrequest_opnum'],
dcerpcrequest['dcerpcserviceop_name'],
dcerpcrequest['dcerpcserviceop_vuln']) )
def print_sip_commands(cursor, connection, indent):
r = cursor.execute("""
SELECT
sip_command,
sip_command_method,
sip_command_call_id,
sip_command_user_agent,
sip_command_allow
FROM
sip_commands
WHERE
connection = ?""", (connection, ))
sipcommands = resolve_result(r)
for cmd in sipcommands:
print("{:s} Method:{:s}".format(
' ' * indent,
cmd['sip_command_method']))
print("{:s} Call-ID:{:s}".format(
' ' * indent,
cmd['sip_command_call_id']))
print("{:s} User-Agent:{:s}".format(
' ' * indent,
cmd['sip_command_user_agent']))
print_sip_addrs(cursor, cmd['sip_command'], indent+2)
print_sip_vias(cursor, cmd['sip_command'], indent+2)
print_sip_sdp_origins(cursor, cmd['sip_command'], indent+2)
print_sip_sdp_connectiondatas(cursor, cmd['sip_command'], indent+2)
print_sip_sdp_medias(cursor, cmd['sip_command'], indent+2)
def print_sip_addrs(cursor, sip_command, indent):
r = cursor.execute("""
SELECT
sip_addr_type,
sip_addr_display_name,
sip_addr_uri_scheme,
sip_addr_uri_user,
sip_addr_uri_host,
sip_addr_uri_port
FROM
sip_addrs
WHERE
sip_command = ?""", (sip_command, ))
addrs = resolve_result(r)
for addr in addrs:
print("{:s} {:s}: <{}> '{:s}:{:s}@{:s}:{}'".format(
' ' * indent,
addr['sip_addr_type'],
addr['sip_addr_display_name'],
addr['sip_addr_uri_scheme'],
addr['sip_addr_uri_user'],
addr['sip_addr_uri_host'],
addr['sip_addr_uri_port']))
def print_sip_vias(cursor, sip_command, indent):
r = cursor.execute("""
SELECT
sip_via_protocol,
sip_via_address,
sip_via_port
FROM
sip_vias
WHERE
sip_command = ?""", (sip_command, ))
vias = resolve_result(r)
for via in vias:
print("{:s} via:'{:s}/{:s}:{}'".format(
' ' * indent,
via['sip_via_protocol'],
via['sip_via_address'],
via['sip_via_port']))
def print_sip_sdp_origins(cursor, sip_command, indent):
r = cursor.execute("""
SELECT
sip_sdp_origin_username,
sip_sdp_origin_sess_id,
sip_sdp_origin_sess_version,
sip_sdp_origin_nettype,
sip_sdp_origin_addrtype,
sip_sdp_origin_unicast_address
FROM
sip_sdp_origins
WHERE
sip_command = ?""", (sip_command, ))
vias = resolve_result(r)
for via in vias:
print("{:s} o:'{} {} {} {} {} {}'".format(
' ' * indent,
via['sip_sdp_origin_username'],
via['sip_sdp_origin_sess_id'],
via['sip_sdp_origin_sess_version'],
via['sip_sdp_origin_nettype'],
via['sip_sdp_origin_addrtype'],
via['sip_sdp_origin_unicast_address']))
def print_sip_sdp_connectiondatas(cursor, sip_command, indent):
r = cursor.execute("""
SELECT
sip_sdp_connectiondata_nettype,
sip_sdp_connectiondata_addrtype,
sip_sdp_connectiondata_connection_address,
sip_sdp_connectiondata_ttl,
sip_sdp_connectiondata_number_of_addresses
FROM
sip_sdp_connectiondatas
WHERE
sip_command = ?""", (sip_command, ))
vias = resolve_result(r)
for via in vias:
print("{:s} c:'{} {} {} {} {}'".format(
' ' * indent,
via['sip_sdp_connectiondata_nettype'],
via['sip_sdp_connectiondata_addrtype'],
via['sip_sdp_connectiondata_connection_address'],
via['sip_sdp_connectiondata_ttl'],
via['sip_sdp_connectiondata_number_of_addresses']))
def print_sip_sdp_medias(cursor, sip_command, indent):
r = cursor.execute("""
SELECT
sip_sdp_media_media,
sip_sdp_media_port,
sip_sdp_media_number_of_ports,
sip_sdp_media_proto
FROM
sip_sdp_medias
WHERE
sip_command = ?""", (sip_command, ))
vias = resolve_result(r)
for via in vias:
print("{:s} m:'{} {} {} {}'".format(
' ' * indent,
via['sip_sdp_media_media'],
via['sip_sdp_media_port'],
via['sip_sdp_media_number_of_ports'],
via['sip_sdp_media_proto']))
def print_logins(cursor, connection, indent):
r = cursor.execute("""
SELECT
login_username,
login_password
FROM
logins
WHERE connection = ?""", (connection, ))
logins = resolve_result(r)
for login in logins:
print("{:s} login - user:'{:s}' password:'{:s}'".format(
' ' * indent,
login['login_username'],
login['login_password']))
def print_mssql_fingerprints(cursor, connection, indent):
r = cursor.execute("""
SELECT
mssql_fingerprint_hostname,
mssql_fingerprint_appname,
mssql_fingerprint_cltintname
FROM
mssql_fingerprints
WHERE connection = ?""", (connection, ))
fingerprints = resolve_result(r)
for fingerprint in fingerprints:
print("{:s} mssql fingerprint - hostname:'{:s}' cltintname:'{:s}' appname:'{:s}'".format(
' ' * indent,
fingerprint['mssql_fingerprint_hostname'],
fingerprint['mssql_fingerprint_appname'],
fingerprint['mssql_fingerprint_cltintname']))
def print_mssql_commands(cursor, connection, indent):
r = cursor.execute("""
SELECT
mssql_command_status,
mssql_command_cmd
FROM
mssql_commands
WHERE connection = ?""", (connection, ))
commands = resolve_result(r)
for cmd in commands:
print("{:s} mssql command - status:{:s} cmd:'{:s}'".format(
' ' * indent,
cmd['mssql_command_status'],
cmd['mssql_command_cmd']))
def print_mysql_commands(cursor, connection, indent):
r = cursor.execute("""
SELECT
mysql_command,
mysql_command_cmd,
mysql_command_op_name
FROM
mysql_commands
LEFT OUTER JOIN mysql_command_ops USING(mysql_command_cmd)
WHERE
connection = ?""", (connection, ))
commands = resolve_result(r)
for cmd in commands:
print("{:s} mysql command (0x{:02x}) {:s}".format(
' ' * indent,
cmd['mysql_command_cmd'],
cmd['mysql_command_op_name']
), end='')
# args
r = cursor.execute("""
SELECT
mysql_command_arg_data
FROM
mysql_command_args
WHERE
mysql_command = ?
ORDER BY
mysql_command_arg_index ASC """, (cmd['mysql_command'], ))
args = resolve_result(r)
print("({:s})".format(
",".join([ "'%s'" % arg['mysql_command_arg_data'] for arg in args])))
def print_connection(c, indent):
indentStr = ' ' * (indent + 1)
if c['connection_type'] in ['accept', 'reject', 'pending']:
print(indentStr + 'connection {:d} {:s} {:s} {:s} {:s}:{:d} <- {:s}:{:d}'.format(
c['connection'], c['connection_protocol'], c[
'connection_transport'],
c['connection_type'], c['local_host'], c['local_port'],
c['remote_host'], c['remote_port']), end='')
elif c['connection_type'] == 'connect':
print(indentStr + 'connection {:d} {:s} {:s} {:s} {:s}:{:d} -> {:s}/{:s}:{:d}'.format(
c['connection'], c['connection_protocol'],
c['connection_transport'], c['connection_type'], c['local_host'],
c['local_port'], c['remote_hostname'], c['remote_host'],
c['remote_port']), end='')
elif c['connection_type'] == 'listen':
print(indentStr + 'connection {:d} {:s} {:s} {:s} {:s}:{:d}'.format(
c['connection'], c['connection_protocol'],
c['connection_transport'], c['connection_type'], c['local_host'],
c['local_port']), end='')
print(' ({} {})'.format(c['connection_root'], c['connection_parent']))
def recursive_print(cursor, connection, indent):
result = cursor.execute(
"SELECT * from connections WHERE connection_parent = ?", (connection, ))
connections = resolve_result(result)
for c in connections:
if c['connection'] == connection:
continue
print_connection(c, indent+1)
print_p0fs(cursor, c['connection'], indent+2)
print_dcerpcbinds(cursor, c['connection'], indent+2)
print_dcerpcrequests(cursor, c['connection'], indent+2)
print_profiles(cursor, c['connection'], indent+2)
print_offers(cursor, c['connection'], indent+2)
print_downloads(cursor, c['connection'], indent+2)
print_services(cursor, c['connection'], indent+2)
print_sip_commands(cursor, c['connection'], indent+2)
recursive_print(cursor, c['connection'], indent+2)
def print_db(options, args):
dbpath = '/opt/dionaea/var/dionaea/logsql.sqlite'
if len(args) >= 1:
dbpath = args[0]
print("using database located at {0}".format(dbpath))
dbh = sqlite3.connect(dbpath)
cursor = dbh.cursor()
offset = 0
limit = 1000
query = """
SELECT DISTINCT
c.connection AS connection,
connection_root,
connection_parent,
connection_type,
connection_protocol,
connection_transport,
datetime(connection_timestamp, 'unixepoch', 'localtime') AS connection_timestamp,
local_host,
local_port,
remote_host,
remote_hostname,
remote_port
FROM
connections AS c
LEFT OUTER JOIN offers ON (offers.connection = c.connection)
LEFT OUTER JOIN downloads ON (downloads.connection = c.connection)
LEFT OUTER JOIN dcerpcbinds ON (dcerpcbinds.connection = c.connection)
LEFT OUTER JOIN dcerpcrequests ON (dcerpcrequests.connection = c.connection)
WHERE
(c.connection_root = c.connection OR c.connection_root IS NULL)
"""
if options.remote_host:
query = query + \
"\tAND remote_host = '{:s}' \n".format(options.remote_host)
if options.connection:
query = query + \
"\tAND c.connection = {:d} \n".format(options.connection)
if options.in_offer_url:
query = query + \
"\tAND offer_url LIKE '%{:s}%' \n".format(options.in_offer_url)
if options.in_download_url:
query = query + \
"\tAND download_url LIKE '%{:s}%' \n".format(
options.in_download_url)
if options.time_from:
query = query + \
"\tAND connection_timestamp > {:s} \n".format(options.time_from)
if options.time_to:
query = query + \
"\tAND connection_timestamp < {:s} \n".format(options.time_to)
if options.uuid:
query = query + \
"\tAND dcerpcbind_uuid = '{:s}' \n".format(options.uuid)
if options.opnum:
query = query + \
"\tAND dcerpcrequest_opnum = {:s} \n".format(options.opnum)
if options.protocol:
query = query + \
"\tAND connection_protocol = '{:s}' \n".format(options.protocol)
if options.md5sum:
query = query + \
"\tAND download_md5_hash = '{:s}' \n".format(options.md5sum)
if options.type:
query = query + \
"\tAND connection_type = '{:s}' \n".format(options.type)
if options.query:
print(query)
return
while True:
lquery = query + "\t LIMIT {:d} OFFSET {:d} \n".format(limit, offset)
result = cursor.execute(lquery)
connections = resolve_result(result)
# print(connections)
for c in connections:
print("{:s}".format(c['connection_timestamp']))
print_connection(c, 1)
print_p0fs(cursor, c['connection'], 2)
print_dcerpcbinds(cursor, c['connection'], 2)
print_dcerpcrequests(cursor, c['connection'], 2)
print_profiles(cursor, c['connection'], 2)
print_offers(cursor, c['connection'], 2)
print_downloads(cursor, c['connection'], 2)
print_services(cursor, c['connection'], 2)
print_logins(cursor, c['connection'], 2)
print_mssql_fingerprints(cursor, c['connection'], 2)
print_mssql_commands(cursor, c['connection'], 2)
print_mysql_commands(cursor, c['connection'], 2)
print_sip_commands(cursor, c['connection'], 2)
recursive_print(cursor, c['connection'], 2)
offset += limit
if len(connections) != limit:
break
if __name__ == "__main__":
parser = OptionParser()
parser.add_option(
"-r", "--remote-host", action="store", type="string", dest="remote_host")
parser.add_option(
"-o", "--in-offer-url", action="store", type="string", dest="in_offer_url")
parser.add_option("-d", "--in-download-url",
action="store", type="string", dest="in_download_url")
parser.add_option(
"-c", "--connection", action="store", type="int", dest="connection")
parser.add_option(
"-q", "--query-only", action="store_true", dest="query", default=False)
parser.add_option(
"-t", "--time-from", action="store", type="string", dest="time_from")
parser.add_option(
"-T", "--time-to", action="store", type="string", dest="time_to")
parser.add_option(
"-u", "--dcerpcbind-uuid", action="store", type="string", dest="uuid")
parser.add_option(
"-p", "--dcerpcrequest-opnum", action="store", type="string", dest="opnum")
parser.add_option(
"-P", "--protocol", action="store", type="string", dest="protocol")
parser.add_option(
"-m", "--downloads-md5sum", action="store", type="string", dest="md5sum")
parser.add_option(
"-y", "--connection-type", action="store", type="string", dest="type")
(options, args) = parser.parse_args()
print_db(options, args)
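# Illustrative invocations (not part of the original script; the flag values are
# only examples):
#   ./readlogsqltree.py -r 10.0.0.5 /opt/dionaea/var/dionaea/logsql.sqlite
#   ./readlogsqltree.py -P smbd -t 1262300400 -T 1264978800 logsql.sqlite
#   ./readlogsqltree.py -q      # print the generated SQL query instead of running it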
| gpl-2.0 | -7,101,606,185,989,129,000 | 34.453155 | 177 | 0.59465 | false |
hasecbinusr/pysal | pysal/region/components.py | 15 | 4728 | """
Checking for connected components in a graph.
"""
__author__ = "Sergio J. Rey <srey@asu.edu>"
__all__ = ["check_contiguity"]
from operator import lt
def is_component(w, ids):
"""Check if the set of ids form a single connected component
Parameters
----------
    w : spatial weights object
ids : list
identifiers of units that are tested to be a single connected
component
Returns
-------
True : if the list of ids represents a single connected component
False : if the list of ids forms more than a single connected component
"""
components = 0
marks = dict([(node, 0) for node in ids])
q = []
for node in ids:
if marks[node] == 0:
components += 1
q.append(node)
if components > 1:
return False
while q:
node = q.pop()
marks[node] = components
others = [neighbor for neighbor in w.neighbors[node]
if neighbor in ids]
for other in others:
if marks[other] == 0 and other not in q:
q.append(other)
return True
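# Illustrative example (not part of the original module), using a 5x5 lattice:
#   >>> import pysal
#   >>> w = pysal.lat2W(5, 5)
#   >>> is_component(w, [0, 1, 2, 3, 4])   # the first row is one connected piece
#   True
#   >>> is_component(w, [0, 2])            # 0 and 2 are not adjacent -> two pieces
#   False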
def check_contiguity(w, neighbors, leaver):
"""Check if contiguity is maintained if leaver is removed from neighbors
Parameters
----------
w : spatial weights object
simple contiguity based weights
neighbors : list
nodes that are to be checked if they form a single \
connected component
leaver : id
a member of neighbors to check for removal
Returns
-------
True : if removing leaver from neighbors does not break contiguity
of remaining set
in neighbors
False : if removing leaver from neighbors breaks contiguity
Example
-------
Setup imports and a 25x25 spatial weights matrix on a 5x5 square region.
>>> import pysal
>>> w = pysal.lat2W(5, 5)
Test removing various areas from a subset of the region's areas. In the
first case the subset is defined as observations 0, 1, 2, 3 and 4. The
test shows that observations 0, 1, 2 and 3 remain connected even if
observation 4 is removed.
>>> pysal.region.check_contiguity(w,[0,1,2,3,4],4)
True
>>> pysal.region.check_contiguity(w,[0,1,2,3,4],3)
False
>>> pysal.region.check_contiguity(w,[0,1,2,3,4],0)
True
>>> pysal.region.check_contiguity(w,[0,1,2,3,4],1)
False
>>>
"""
ids = neighbors[:]
ids.remove(leaver)
return is_component(w, ids)
class Graph(object):
def __init__(self, undirected=True):
self.nodes = set()
self.edges = {}
self.cluster_lookup = {}
self.no_link = {}
self.undirected = undirected
def add_edge(self, n1, n2, w):
self.nodes.add(n1)
self.nodes.add(n2)
self.edges.setdefault(n1, {}).update({n2: w})
if self.undirected:
self.edges.setdefault(n2, {}).update({n1: w})
def connected_components(self, threshold=0.9, op=lt):
if not self.undirected:
warn = "Warning, connected _components not "
warn += "defined for a directed graph"
print warn
return None
else:
nodes = set(self.nodes)
components, visited = [], set()
while len(nodes) > 0:
connected, visited = self.dfs(
nodes.pop(), visited, threshold, op)
connected = set(connected)
for node in connected:
if node in nodes:
nodes.remove(node)
subgraph = Graph()
subgraph.nodes = connected
subgraph.no_link = self.no_link
for s in subgraph.nodes:
for k, v in self.edges.get(s, {}).iteritems():
if k in subgraph.nodes:
subgraph.edges.setdefault(s, {}).update({k: v})
if s in self.cluster_lookup:
subgraph.cluster_lookup[s] = self.cluster_lookup[s]
components.append(subgraph)
return components
def dfs(self, v, visited, threshold, op=lt, first=None):
aux = [v]
visited.add(v)
if first is None:
first = v
for i in (n for n, w in self.edges.get(v, {}).iteritems()
if op(w, threshold) and n not in visited):
x, y = self.dfs(i, visited, threshold, op, first)
aux.extend(x)
visited = visited.union(y)
return aux, visited
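# Illustrative usage sketch (not part of the original module). An edge is followed
# when op(weight, threshold) holds -- with the default op this means weight < threshold:
#   g = Graph()
#   g.add_edge('a', 'b', 0.2)
#   g.add_edge('c', 'd', 0.1)
#   parts = g.connected_components(threshold=0.9)
#   # -> two subgraphs, one containing {'a', 'b'} and one containing {'c', 'd'}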
| bsd-3-clause | -1,147,492,675,959,094,000 | 28.55 | 77 | 0.53786 | false |
mshirley/dionaea | modules/python/scripts/smb/include/helpers.py | 9 | 6042 | #********************************************************************************
#* Dionaea
#* - catches bugs -
#*
#*
#*
#* Copyright (C) 2010 Markus Koetter
#* Copyright (C) 2009 Paul Baecher & Markus Koetter & Mark Schloesser
#*
#* This program is free software; you can redistribute it and/or
#* modify it under the terms of the GNU General Public License
#* as published by the Free Software Foundation; either version 2
#* of the License, or (at your option) any later version.
#*
#* This program is distributed in the hope that it will be useful,
#* but WITHOUT ANY WARRANTY; without even the implied warranty of
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#* GNU General Public License for more details.
#*
#* You should have received a copy of the GNU General Public License
#* along with this program; if not, write to the Free Software
#* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#*
#*
#* contact nepenthesdev@gmail.com
#*
#*******************************************************************************/
#* This file was part of Scapy
#* See http://www.secdev.org/projects/scapy for more informations
#* Copyright (C) Philippe Biondi <phil@secdev.org>
#* This program is published under a GPLv2 license
#*******************************************************************************
import re
import socket  # used by Net() to resolve hostnames that are not dotted-quad addresses
import random  # used by Net.choice()
class VolatileValue:
def __repr__(self):
return "<%s>" % self.__class__.__name__
def __getattr__(self, attr):
if attr == "__setstate__":
raise AttributeError(attr)
return getattr(self._fix(),attr)
def _fix(self):
return None
class Gen(object):
def __iter__(self):
return iter([])
class Net(Gen):
"""Generate a list of IPs from a network address or a name"""
name = "ip"
ipaddress = re.compile(r"^(\*|[0-2]?[0-9]?[0-9](-[0-2]?[0-9]?[0-9])?)\.(\*|[0-2]?[0-9]?[0-9](-[0-2]?[0-9]?[0-9])?)\.(\*|[0-2]?[0-9]?[0-9](-[0-2]?[0-9]?[0-9])?)\.(\*|[0-2]?[0-9]?[0-9](-[0-2]?[0-9]?[0-9])?)(/[0-3]?[0-9])?$")
def __init__(self, net):
self.repr=net
tmp=net.split('/')+["32"]
if not self.ipaddress.match(net):
tmp[0]=socket.gethostbyname(tmp[0])
netmask = int(tmp[1])
def parse_digit(a,netmask):
netmask = min(8,max(netmask,0))
if a == "*":
a = (0,256)
elif a.find("-") >= 0:
x,y = list(map(int,a.split("-")))
if x > y:
y = x
a = (x & (0xff<<netmask) , max(y, (x | (0xff>>(8-netmask))))+1)
else:
a = (int(a) & (0xff<<netmask),(int(a) | (0xff>>(8-netmask)))+1)
return a
self.parsed = list(map(lambda x,y: parse_digit(x,y), tmp[0].split("."), list(map(lambda x,nm=netmask: x-nm, (8,16,24,32)))))
def __iter__(self):
for d in range(*self.parsed[3]):
for c in range(*self.parsed[2]):
for b in range(*self.parsed[1]):
for a in range(*self.parsed[0]):
yield "%i.%i.%i.%i" % (a,b,c,d)
def choice(self):
ip = []
for v in self.parsed:
ip.append(str(random.randint(v[0],v[1]-1)))
return ".".join(ip)
def __repr__(self):
return "Net(%r)" % self.repr
class SetGen(Gen):
def __init__(self, set, _iterpacket=1):
self._iterpacket=_iterpacket
if type(set) is list:
self.set = set
elif isinstance(set, BasePacketList):
self.set = list(set)
else:
self.set = [set]
def transf(self, element):
return element
def __iter__(self):
for i in self.set:
if (type(i) is tuple) and (len(i) == 2) and type(i[0]) is int and type(i[1]) is int:
if (i[0] <= i[1]):
j=i[0]
while j <= i[1]:
yield j
j += 1
elif isinstance(i, Gen) and (self._iterpacket or not isinstance(i,BasePacket)):
for j in i:
yield j
else:
yield i
def __repr__(self):
return "<SetGen %s>" % self.set.__repr__()
class BasePacket(Gen):
pass
class BasePacketList:
pass
def lhex(x):
if type(x) in (int,int):
return hex(x)
elif type(x) is tuple:
return "(%s)" % ", ".join(map(lhex, x))
elif type(x) is list:
return "[%s]" % ", ".join(map(lhex, x))
else:
return x
#########################
#### Enum management ####
#########################
class EnumElement:
_value=None
def __init__(self, key, value):
self._key = key
self._value = value
def __repr__(self):
return "<%s %s[%r]>" % (self.__dict__.get("_name", self.__class__.__name__), self._key, self._value)
def __getattr__(self, attr):
return getattr(self._value, attr)
def __int__(self):
return self._value
def __str__(self):
return self._key
def __eq__(self, other):
return self._value == int(other)
class Enum_metaclass(type):
element_class = EnumElement
def __new__(cls, name, bases, dct):
rdict={}
for k,v in dct.items():
if type(v) is int:
v = cls.element_class(k,v)
dct[k] = v
rdict[type(v)] = k
dct["__rdict__"] = rdict
return super(Enum_metaclass, cls).__new__(cls, name, bases, dct)
def __getitem__(self, attr):
return self.__rdict__[attr]
def __contains__(self, val):
return val in self.__rdict__
def get(self, attr, val=None):
        return self.__rdict__.get(attr, val)
def __repr__(self):
return "<%s>" % self.__dict__.get("name", self.__name__)
| gpl-2.0 | 4,504,917,729,712,036,400 | 33.135593 | 226 | 0.476663 | false |
huggingface/transformers | src/transformers/models/mpnet/__init__.py | 2 | 3662 | # flake8: noqa
# There's no way to ignore "F401 '...' imported but unused" warnings in this
# module, but to preserve other warnings. So, don't check this module at all.
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...file_utils import (
_BaseLazyModule,
is_flax_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
_import_structure = {
"configuration_mpnet": ["MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "MPNetConfig"],
"tokenization_mpnet": ["MPNetTokenizer"],
}
if is_tokenizers_available():
_import_structure["tokenization_mpnet_fast"] = ["MPNetTokenizerFast"]
if is_torch_available():
_import_structure["modeling_mpnet"] = [
"MPNET_PRETRAINED_MODEL_ARCHIVE_LIST",
"MPNetForMaskedLM",
"MPNetForMultipleChoice",
"MPNetForQuestionAnswering",
"MPNetForSequenceClassification",
"MPNetForTokenClassification",
"MPNetLayer",
"MPNetModel",
"MPNetPreTrainedModel",
]
if is_tf_available():
_import_structure["modeling_tf_mpnet"] = [
"TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFMPNetEmbeddings",
"TFMPNetForMaskedLM",
"TFMPNetForMultipleChoice",
"TFMPNetForQuestionAnswering",
"TFMPNetForSequenceClassification",
"TFMPNetForTokenClassification",
"TFMPNetMainLayer",
"TFMPNetModel",
"TFMPNetPreTrainedModel",
]
if TYPE_CHECKING:
from .configuration_mpnet import MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP, MPNetConfig
from .tokenization_mpnet import MPNetTokenizer
if is_tokenizers_available():
from .tokenization_mpnet_fast import MPNetTokenizerFast
if is_torch_available():
from .modeling_mpnet import (
MPNET_PRETRAINED_MODEL_ARCHIVE_LIST,
MPNetForMaskedLM,
MPNetForMultipleChoice,
MPNetForQuestionAnswering,
MPNetForSequenceClassification,
MPNetForTokenClassification,
MPNetLayer,
MPNetModel,
MPNetPreTrainedModel,
)
if is_tf_available():
from .modeling_tf_mpnet import (
TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST,
TFMPNetEmbeddings,
TFMPNetForMaskedLM,
TFMPNetForMultipleChoice,
TFMPNetForQuestionAnswering,
TFMPNetForSequenceClassification,
TFMPNetForTokenClassification,
TFMPNetMainLayer,
TFMPNetModel,
TFMPNetPreTrainedModel,
)
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
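# Illustrative note (not part of the original file): with the lazy module installed,
#   from transformers.models.mpnet import MPNetConfig, MPNetTokenizer
# only triggers the import of configuration_mpnet / tokenization_mpnet; the heavier
# modeling_mpnet and modeling_tf_mpnet submodules stay unloaded until first access.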
| apache-2.0 | 8,507,149,536,017,073,000 | 30.568966 | 115 | 0.659476 | false |
GREO/GNU-Radio | gnuradio-examples/python/digital/usrp_options.py | 4 | 5638 | #
# Copyright 2009 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
_parser_to_groups_dict = dict()
class _parser_groups(object):
def __init__(self, parser):
self.usrpx_grp = parser.add_option_group("General USRP Options")
self.usrp1_grp = parser.add_option_group("USRP1 Specific Options")
self.usrp1exp_grp = parser.add_option_group("USRP1 Expert Options")
self.usrp2_grp = parser.add_option_group("USRP2 Specific Options")
import generic_usrp
def _add_options(parser):
"""
Add options to manually choose between usrp or usrp2.
Add options for usb. Add options common to source and sink.
@param parser: instance of OptionParser
@return the parser group
"""
#cache groups so they dont get added twice on tranceiver apps
if not _parser_to_groups_dict.has_key(parser): _parser_to_groups_dict[parser] = _parser_groups(parser)
pg = _parser_to_groups_dict[parser]
#pick usrp or usrp2
pg.usrpx_grp.add_option("-u", "--usrpx", type="string", default=None,
help="specify which usrp model: 1 for USRP, 2 for USRP2 [default=auto]")
#fast usb options
pg.usrp1exp_grp.add_option("-B", "--fusb-block-size", type="int", default=0,
help="specify fast usb block size [default=%default]")
pg.usrp1exp_grp.add_option("-N", "--fusb-nblocks", type="int", default=0,
help="specify number of fast usb blocks [default=%default]")
#lo offset
pg.usrpx_grp.add_option("--lo-offset", type="eng_float", default=None,
help="set LO Offset in Hz [default=automatic].")
#usrp options
pg.usrp1_grp.add_option("-w", "--which", type="int", default=0,
help="select USRP board [default=%default]")
#usrp2 options
pg.usrp2_grp.add_option("-e", "--interface", type="string", default="eth0",
help="Use USRP2 at specified Ethernet interface [default=%default]")
pg.usrp2_grp.add_option("-a", "--mac-addr", type="string", default="",
help="Use USRP2 at specified MAC address [default=None]")
return pg
def add_rx_options(parser):
"""
Add receive specific usrp options.
@param parser: instance of OptionParser
"""
pg = _add_options(parser)
pg.usrp1_grp.add_option("-R", "--rx-subdev-spec", type="subdev", default=None,
help="select USRP Rx side A or B")
pg.usrpx_grp.add_option("--rx-gain", type="eng_float", default=None, metavar="GAIN",
help="set receiver gain in dB [default=midpoint]. See also --show-rx-gain-range")
pg.usrpx_grp.add_option("--show-rx-gain-range", action="store_true", default=False,
help="print min and max Rx gain available on selected daughterboard")
pg.usrpx_grp.add_option("-d", "--decim", type="intx", default=None,
help="set fpga decimation rate to DECIM [default=%default]")
def create_usrp_source(options):
u = generic_usrp.generic_usrp_source_c(
usrpx=options.usrpx,
which=options.which,
subdev_spec=options.rx_subdev_spec,
interface=options.interface,
mac_addr=options.mac_addr,
fusb_block_size=options.fusb_block_size,
fusb_nblocks=options.fusb_nblocks,
lo_offset=options.lo_offset,
gain=options.rx_gain,
)
if options.show_rx_gain_range:
print "Rx Gain Range: minimum = %g, maximum = %g, step size = %g"%tuple(u.gain_range())
return u
def add_tx_options(parser):
"""
Add transmit specific usrp options.
@param parser: instance of OptionParser
"""
pg = _add_options(parser)
pg.usrp1_grp.add_option("-T", "--tx-subdev-spec", type="subdev", default=None,
help="select USRP Rx side A or B")
pg.usrpx_grp.add_option("--tx-gain", type="eng_float", default=None, metavar="GAIN",
help="set transmitter gain in dB [default=midpoint]. See also --show-tx-gain-range")
pg.usrpx_grp.add_option("--show-tx-gain-range", action="store_true", default=False,
help="print min and max Tx gain available on selected daughterboard")
pg.usrpx_grp.add_option("-i", "--interp", type="intx", default=None,
help="set fpga interpolation rate to INTERP [default=%default]")
def create_usrp_sink(options):
u = generic_usrp.generic_usrp_sink_c(
usrpx=options.usrpx,
which=options.which,
subdev_spec=options.tx_subdev_spec,
interface=options.interface,
mac_addr=options.mac_addr,
fusb_block_size=options.fusb_block_size,
fusb_nblocks=options.fusb_nblocks,
lo_offset=options.lo_offset,
gain=options.tx_gain,
)
if options.show_tx_gain_range:
print "Tx Gain Range: minimum = %g, maximum = %g, step size = %g"%tuple(u.gain_range())
return u
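# Illustrative usage sketch (not part of the original module; assumes the parser is
# created with GNU Radio's eng_option so the "eng_float"/"intx"/"subdev" option
# types used above resolve):
#   from optparse import OptionParser
#   from gnuradio.eng_option import eng_option
#   parser = OptionParser(option_class=eng_option)
#   usrp_options.add_rx_options(parser)
#   (options, args) = parser.parse_args()
#   u = usrp_options.create_usrp_source(options)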
| gpl-3.0 | -2,674,493,377,188,589,600 | 44.837398 | 107 | 0.641717 | false |
molokov/mezzanine | mezzanine/blog/management/commands/import_posterous.py | 10 | 4468 | from __future__ import unicode_literals
from datetime import datetime
import json
import time
from django.core.management.base import CommandError
from mezzanine.blog.management.base import BaseImporterCommand
class Command(BaseImporterCommand):
"""
    Import Posterous blog posts into the blog app.
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
"-a", "--api-token", dest="api_token",
help="Posterous API Key")
parser.add_argument(
"-u", "--posterous-user", dest="username",
help="Posterous Username")
parser.add_argument(
"-p", "--posterous-pass", dest="password",
help="Posterous Password")
parser.add_argument(
"-d", "--posterous-host", dest="hostname",
help="Posterous Blog Hostname (no http.. eg. 'foo.com')")
help = "Import Posterous blog posts into the blog app."
def request(self, path, data=None):
try:
import requests
except ImportError:
raise CommandError("Posterous importer requires the requests"
"library installed")
data = data or {}
params = {
'api_token': self.api_token
}
params.update(data)
url = "http://posterous.com/api/2/%s" % path
r = requests.get(url,
data=params,
auth=(self.username, self.password)
)
if r.text.startswith("403"):
raise CommandError(r.text)
try:
response = json.loads(r.text)
return response
except:
raise CommandError(r.text)
def handle_import(self, options):
self.api_token = options.get("api_token")
self.username = options.get("username")
self.password = options.get("password")
hostname = options.get("hostname")
sites = self.request('sites')
site = None
for s in sites:
if s['full_hostname'] == hostname:
site = s
time.sleep(2)
break
if not hostname and not site:
if len(sites) == 1:
site = sites[0]
else:
raise CommandError(
"Please pass your blog hostname if you have more than"
" one blog on your posterous account."
)
page = 1
while True:
path = 'sites/%s/posts' % site['id']
time.sleep(2)
posts = self.request(path, data={'page': page})
if not posts:
break
for post in posts:
content = post['body_full']
title = post['title']
old_url = post['full_url']
tags = [t['name'] for t in post['tags']]
pub_date = datetime.strptime(
post['display_date'][:-6],
"%Y/%m/%d %H:%M:%S"
)
self.add_post(
title=title,
content=content,
pub_date=pub_date,
tags=tags,
old_url=old_url
)
if not post['comments_count']:
continue
path = "sites/%s/posts/%s/comments" % (site['id'], post['id'])
time.sleep(2)
comments = self.request(path)
for comment in comments:
post = None
email = ""
pub_date = datetime.strptime(
comment['created_at'][:-6],
"%Y/%m/%d %H:%M:%S"
)
website = ""
if 'user' in comment:
website = comment['user']['profile_url']
name = comment['user']['display_name']
else:
name = comment['name']
website = "http://twitter.com/%s" % name
body = comment['body']
self.add_comment(
post=post,
name=name,
email=email,
pub_date=pub_date,
website=website,
body=body
)
page += 1
| bsd-2-clause | 6,661,056,581,738,152,000 | 33.369231 | 78 | 0.455237 | false |
DoraemonShare/yuqing | src/utils/config_orm.py | 1 | 2679 | #-*-coding:utf-8 -*-
'''
postgresql configuration
table ORM
'''
import psycopg2
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import Column, Integer, String, Table, text, Date, Boolean
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
metadata = Base.metadata
engine = create_engine('*************************')
session = sessionmaker(bind = engine)
class BBSPost(Base):
__tablename__ = 'autohome_bbsposts_specified'
# id = Column(Integer(),primary_key=True)
	topic = Column(String())			# post title
	level = Column(String())			# post level, e.g. featured, question post, etc.
	writer = Column(String())			# post author
	publicationDate = Column(Date())	# posting time
	replyNum = Column(Integer())		# reply count --- list page and detail page may not match
	clicks = Column(Integer())			# click count --- list page and detail page may not match
	finalReplyWriter = Column(String())	# last reply author
	finalReplyDate = Column(Date())	# last reply time
	postUrl = Column(String(),primary_key=True)	# detail page url
	qaClassification = Column(String())	# category supplied by the Q&A post itself, not necessarily accurate
	postContent = Column(String())		# post body
	postImageUrl = Column(String())	# urls of images in the post
	postType = Column(String())			# post level/type, e.g. question post, featured, with images
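# Illustrative usage sketch (not part of the original module): assuming a real
# connection URL is supplied to create_engine above, the sessionmaker and the
# BBSPost model could be queried roughly as follows; the filter threshold and
# the printed fields are hypothetical.
#
#     s = session()
#     hot_posts = s.query(BBSPost).filter(BBSPost.replyNum > 100).all()
#     for p in hot_posts:
#         print p.topic, p.postUrl
#     s.close()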
# class BBSCarList(Base):
# __tablename__ = 'autohome_bbscarlist_specified'
# 	carType = Column(String())	# car model name
# 	bbsUrl = Column(String(), primary_key=True)	# forum url
# 	isUpdated = Column(Boolean())	# whether updated in this run; controls whether this sub-forum needs to be crawled
class BBSPost_CLF(Base):
__tablename__ = 'autohome_bbsposts_clf_results'
# id = Column(Integer(),primary_key=True)
	topic = Column(String())			# post title
	# writer = Column(String())		# post author
	publicationDate = Column(Date())	# posting time
	replyNum = Column(Integer())		# reply count --- list page and detail page may not match
	clicks = Column(Integer())			# click count --- list page and detail page may not match
	# finalReplyWriter = Column(String())	# last reply author
	# finalReplyDate = Column(Date())	# last reply time
	postUrl = Column(String(),primary_key=True)	# detail page url
	# qaClassification = Column(String())	# category supplied by the Q&A post itself, not necessarily accurate
	postContent = Column(String())		# post body
	# postImageUrl = Column(String())	# urls of images in the post
	# postType = Column(String())		# post level/type, e.g. question post, featured, with images
	clfResult = Column(String()) # classification result produced by the model | bsd-3-clause | -4,767,302,841,798,857,000 | 32.230769 | 74 | 0.67346 | false |
indhub/mxnet | tests/python/unittest/test_sparse_ndarray.py | 2 | 37684 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pickle as pkl
from mxnet.ndarray import NDArray
from mxnet.test_utils import *
from common import setup_module, with_seed, random_seed, teardown
from mxnet.base import mx_real_t
from numpy.testing import assert_allclose
import numpy.random as rnd
import numpy as np
from common import assertRaises
from mxnet.ndarray.sparse import RowSparseNDArray, CSRNDArray
def sparse_nd_ones(shape, stype):
return mx.nd.ones(shape).tostype(stype)
@with_seed()
def test_sparse_nd_elemwise_add():
def check_sparse_nd_elemwise_binary(shapes, stypes, f, g):
# generate inputs
nds = []
for i, stype in enumerate(stypes):
if stype == 'row_sparse':
nd, _ = rand_sparse_ndarray(shapes[i], stype)
elif stype == 'default':
nd = mx.nd.array(random_arrays(shapes[i]), dtype = np.float32)
else:
assert(False)
nds.append(nd)
# check result
test = f(nds[0], nds[1])
assert_almost_equal(test.asnumpy(), g(nds[0].asnumpy(), nds[1].asnumpy()))
num_repeats = 3
g = lambda x,y: x + y
op = mx.nd.elemwise_add
for i in range(num_repeats):
shape = [rand_shape_2d()] * 2
check_sparse_nd_elemwise_binary(shape, ['default'] * 2, op, g)
check_sparse_nd_elemwise_binary(shape, ['row_sparse', 'row_sparse'], op, g)
@with_seed()
def test_sparse_nd_copy():
def check_sparse_nd_copy(from_stype, to_stype, shape):
from_nd = rand_ndarray(shape, from_stype)
# copy to ctx
to_ctx = from_nd.copyto(default_context())
# copy to stype
to_nd = rand_ndarray(shape, to_stype)
to_nd = from_nd.copyto(to_nd)
assert np.sum(np.abs(from_nd.asnumpy() != to_ctx.asnumpy())) == 0.0
assert np.sum(np.abs(from_nd.asnumpy() != to_nd.asnumpy())) == 0.0
shape = rand_shape_2d()
shape_3d = rand_shape_3d()
stypes = ['row_sparse', 'csr']
for stype in stypes:
check_sparse_nd_copy(stype, 'default', shape)
check_sparse_nd_copy('default', stype, shape)
check_sparse_nd_copy('row_sparse', 'row_sparse', shape_3d)
check_sparse_nd_copy('row_sparse', 'default', shape_3d)
check_sparse_nd_copy('default', 'row_sparse', shape_3d)
@with_seed()
def test_sparse_nd_basic():
def check_sparse_nd_basic_rsp():
storage_type = 'row_sparse'
shape = rand_shape_2d()
nd, (v, idx) = rand_sparse_ndarray(shape, storage_type)
assert(nd._num_aux == 1)
assert(nd.indices.dtype == np.int64)
assert(nd.stype == 'row_sparse')
check_sparse_nd_basic_rsp()
@with_seed()
def test_sparse_nd_setitem():
def check_sparse_nd_setitem(stype, shape, dst):
x = mx.nd.zeros(shape=shape, stype=stype)
x[:] = dst
dst_nd = mx.nd.array(dst) if isinstance(dst, (np.ndarray, np.generic)) else dst
assert np.all(x.asnumpy() == dst_nd.asnumpy() if isinstance(dst_nd, NDArray) else dst)
shape = rand_shape_2d()
for stype in ['row_sparse', 'csr']:
# ndarray assignment
check_sparse_nd_setitem(stype, shape, rand_ndarray(shape, 'default'))
check_sparse_nd_setitem(stype, shape, rand_ndarray(shape, stype))
# numpy assignment
check_sparse_nd_setitem(stype, shape, np.ones(shape))
# scalar assigned to row_sparse NDArray
check_sparse_nd_setitem('row_sparse', shape, 2)
@with_seed()
def test_sparse_nd_slice():
shape = (rnd.randint(2, 10), rnd.randint(2, 10))
stype = 'csr'
A, _ = rand_sparse_ndarray(shape, stype)
A2 = A.asnumpy()
start = rnd.randint(0, shape[0] - 1)
end = rnd.randint(start + 1, shape[0])
assert same(A[start:end].asnumpy(), A2[start:end])
assert same(A[start - shape[0]:end].asnumpy(), A2[start:end])
assert same(A[start:].asnumpy(), A2[start:])
assert same(A[:end].asnumpy(), A2[:end])
ind = rnd.randint(-shape[0], shape[0] - 1)
assert same(A[ind].asnumpy(), A2[ind][np.newaxis, :])
start_col = rnd.randint(0, shape[1] - 1)
end_col = rnd.randint(start_col + 1, shape[1])
result = mx.nd.slice(A, begin=(start, start_col), end=(end, end_col))
result_dense = mx.nd.slice(mx.nd.array(A2), begin=(start, start_col), end=(end, end_col))
assert same(result_dense.asnumpy(), result.asnumpy())
A = mx.nd.sparse.zeros('csr', shape)
A2 = A.asnumpy()
assert same(A[start:end].asnumpy(), A2[start:end])
result = mx.nd.slice(A, begin=(start, start_col), end=(end, end_col))
result_dense = mx.nd.slice(mx.nd.array(A2), begin=(start, start_col), end=(end, end_col))
assert same(result_dense.asnumpy(), result.asnumpy())
def check_slice_nd_csr_fallback(shape):
stype = 'csr'
A, _ = rand_sparse_ndarray(shape, stype)
A2 = A.asnumpy()
start = rnd.randint(0, shape[0] - 1)
end = rnd.randint(start + 1, shape[0])
# non-trivial step should fallback to dense slice op
result = mx.nd.sparse.slice(A, begin=(start,), end=(end + 1,), step=(2,))
result_dense = mx.nd.slice(mx.nd.array(A2), begin=(start,), end=(end + 1,), step=(2,))
assert same(result_dense.asnumpy(), result.asnumpy())
shape = (rnd.randint(2, 10), rnd.randint(1, 10))
check_slice_nd_csr_fallback(shape)
@with_seed()
def test_sparse_nd_concat():
def check_concat(arrays):
ret = np.concatenate([arr.asnumpy() for arr in arrays], axis=0)
same(mx.nd.concat(*arrays, dim=0).asnumpy(), ret)
nds = []
zero_nds = []
ncols = rnd.randint(2, 10)
for i in range(3):
shape = (rnd.randint(2, 10), ncols)
A, _ = rand_sparse_ndarray(shape, 'csr')
nds.append(A)
zero_nds.append(mx.nd.zeros(shape).tostype('csr'))
check_concat(nds)
check_concat(zero_nds)
@with_seed()
def test_sparse_nd_equal():
for stype in ['row_sparse', 'csr']:
shape = rand_shape_2d()
x = mx.nd.zeros(shape=shape, stype=stype)
y = sparse_nd_ones(shape, stype)
z = x == y
assert (z.asnumpy() == np.zeros(shape)).all()
z = 0 == x
assert (z.asnumpy() == np.ones(shape)).all()
@with_seed()
def test_sparse_nd_not_equal():
for stype in ['row_sparse', 'csr']:
shape = rand_shape_2d()
x = mx.nd.zeros(shape=shape, stype=stype)
y = sparse_nd_ones(shape, stype)
z = x != y
assert (z.asnumpy() == np.ones(shape)).all()
z = 0 != x
assert (z.asnumpy() == np.zeros(shape)).all()
@with_seed()
def test_sparse_nd_greater():
for stype in ['row_sparse', 'csr']:
shape = rand_shape_2d()
x = mx.nd.zeros(shape=shape, stype=stype)
y = sparse_nd_ones(shape, stype)
z = x > y
assert (z.asnumpy() == np.zeros(shape)).all()
z = y > 0
assert (z.asnumpy() == np.ones(shape)).all()
z = 0 > y
assert (z.asnumpy() == np.zeros(shape)).all()
@with_seed()
def test_sparse_nd_greater_equal():
for stype in ['row_sparse', 'csr']:
shape = rand_shape_2d()
x = mx.nd.zeros(shape=shape, stype=stype)
y = sparse_nd_ones(shape, stype)
z = x >= y
assert (z.asnumpy() == np.zeros(shape)).all()
z = y >= 0
assert (z.asnumpy() == np.ones(shape)).all()
z = 0 >= y
assert (z.asnumpy() == np.zeros(shape)).all()
z = y >= 1
assert (z.asnumpy() == np.ones(shape)).all()
@with_seed()
def test_sparse_nd_lesser():
for stype in ['row_sparse', 'csr']:
shape = rand_shape_2d()
x = mx.nd.zeros(shape=shape, stype=stype)
y = sparse_nd_ones(shape, stype)
z = y < x
assert (z.asnumpy() == np.zeros(shape)).all()
z = 0 < y
assert (z.asnumpy() == np.ones(shape)).all()
z = y < 0
assert (z.asnumpy() == np.zeros(shape)).all()
@with_seed()
def test_sparse_nd_lesser_equal():
for stype in ['row_sparse', 'csr']:
shape = rand_shape_2d()
x = mx.nd.zeros(shape=shape, stype=stype)
y = sparse_nd_ones(shape, stype)
z = y <= x
assert (z.asnumpy() == np.zeros(shape)).all()
z = 0 <= y
assert (z.asnumpy() == np.ones(shape)).all()
z = y <= 0
assert (z.asnumpy() == np.zeros(shape)).all()
z = 1 <= y
assert (z.asnumpy() == np.ones(shape)).all()
@with_seed()
def test_sparse_nd_binary():
N = 3
def check_binary(fn, stype):
for _ in range(N):
ndim = 2
oshape = np.random.randint(1, 6, size=(ndim,))
bdim = 2
lshape = list(oshape)
# one for broadcast op, another for elemwise op
rshape = list(oshape[ndim-bdim:])
for i in range(bdim):
sep = np.random.uniform(0, 1)
if sep < 0.33:
lshape[ndim-i-1] = 1
elif sep < 0.66:
rshape[bdim-i-1] = 1
lhs = np.random.uniform(0, 1, size=lshape)
rhs = np.random.uniform(0, 1, size=rshape)
lhs_nd = mx.nd.array(lhs).tostype(stype)
rhs_nd = mx.nd.array(rhs).tostype(stype)
assert_allclose(fn(lhs, rhs), fn(lhs_nd, rhs_nd).asnumpy(), rtol=1e-4, atol=1e-4)
assert_allclose(fn(lhs, lhs), fn(lhs_nd, lhs_nd).asnumpy(), rtol=1e-4, atol=1e-4)
stypes = ['row_sparse', 'csr']
for stype in stypes:
check_binary(lambda x, y: x + y, stype)
check_binary(lambda x, y: x - y, stype)
check_binary(lambda x, y: x * y, stype)
check_binary(lambda x, y: x / y, stype)
check_binary(lambda x, y: x ** y, stype)
check_binary(lambda x, y: x > y, stype)
check_binary(lambda x, y: x < y, stype)
check_binary(lambda x, y: x >= y, stype)
check_binary(lambda x, y: x <= y, stype)
check_binary(lambda x, y: x == y, stype)
@with_seed()
def test_sparse_nd_binary_scalar_op():
N = 3
def check(fn, stype, out_stype=None):
for _ in range(N):
ndim = 2
shape = np.random.randint(1, 6, size=(ndim,))
npy = np.random.normal(0, 1, size=shape)
nd = mx.nd.array(npy).tostype(stype)
if out_stype is not None:
assert(nd.stype == out_stype)
assert_allclose(fn(npy), fn(nd).asnumpy(), rtol=1e-4, atol=1e-4)
stypes = ['row_sparse', 'csr']
for stype in stypes:
check(lambda x: 1 + x, stype)
check(lambda x: 1 - x, stype)
check(lambda x: 1 * x, stype)
check(lambda x: 1 / x, stype)
check(lambda x: 2 ** x, stype)
check(lambda x: 1 > x, stype)
check(lambda x: 0.5 > x, stype)
check(lambda x: 0.5 < x, stype)
check(lambda x: 0.5 >= x, stype)
check(lambda x: 0.5 <= x, stype)
check(lambda x: 0.5 == x, stype)
check(lambda x: x / 2, stype, out_stype=stype)
check(lambda x: x + 0, stype, out_stype=stype)
check(lambda x: x - 0, stype, out_stype=stype)
@with_seed()
def test_sparse_nd_binary_iop():
N = 3
def check_binary(fn, stype):
for _ in range(N):
ndim = 2
oshape = np.random.randint(1, 6, size=(ndim,))
lshape = list(oshape)
rshape = list(oshape)
lhs = np.random.uniform(0, 1, size=lshape)
rhs = np.random.uniform(0, 1, size=rshape)
lhs_nd = mx.nd.array(lhs).tostype(stype)
rhs_nd = mx.nd.array(rhs).tostype(stype)
assert_allclose(fn(lhs, rhs),
fn(lhs_nd, rhs_nd).asnumpy(),
rtol=1e-4, atol=1e-4)
def inplace_add(x, y):
x += y
return x
def inplace_mul(x, y):
x *= y
return x
stypes = ['csr', 'row_sparse']
fns = [inplace_add, inplace_mul]
for stype in stypes:
for fn in fns:
check_binary(fn, stype)
@with_seed()
def test_sparse_nd_negate():
def check_sparse_nd_negate(shape, stype):
npy = np.random.uniform(-10, 10, rand_shape_2d())
arr = mx.nd.array(npy).tostype(stype)
assert_almost_equal(npy, arr.asnumpy())
assert_almost_equal(-npy, (-arr).asnumpy())
        # a final check to make sure the negation (-) is not implemented
        # as an inplace operation, so the contents of arr do not change after
        # we compute (-arr)
assert_almost_equal(npy, arr.asnumpy())
shape = rand_shape_2d()
stypes = ['csr', 'row_sparse']
for stype in stypes:
check_sparse_nd_negate(shape, stype)
@with_seed()
def test_sparse_nd_broadcast():
sample_num = 1000
# TODO(haibin) test with more than 2 dimensions
def test_broadcast_to(stype):
for i in range(sample_num):
ndim = 2
target_shape = np.random.randint(1, 11, size=ndim)
shape = target_shape.copy()
axis_flags = np.random.randint(0, 2, size=ndim)
axes = []
for (axis, flag) in enumerate(axis_flags):
if flag:
shape[axis] = 1
dat = np.random.rand(*shape) - 0.5
numpy_ret = dat
ndarray = mx.nd.array(dat).tostype(stype)
ndarray_ret = ndarray.broadcast_to(shape=target_shape)
if type(ndarray_ret) is mx.ndarray.NDArray:
ndarray_ret = ndarray_ret.asnumpy()
assert (ndarray_ret.shape == target_shape).all()
err = np.square(ndarray_ret - numpy_ret).mean()
assert err < 1E-8
stypes = ['csr', 'row_sparse']
for stype in stypes:
test_broadcast_to(stype)
@with_seed()
def test_sparse_nd_transpose():
npy = np.random.uniform(-10, 10, rand_shape_2d())
stypes = ['csr', 'row_sparse']
for stype in stypes:
nd = mx.nd.array(npy).tostype(stype)
assert_almost_equal(npy.T, (nd.T).asnumpy())
@with_seed()
def test_sparse_nd_storage_fallback():
def check_output_fallback(shape):
ones = mx.nd.ones(shape)
out = mx.nd.zeros(shape=shape, stype='csr')
mx.nd.broadcast_add(ones, ones * 2, out=out)
assert(np.sum(out.asnumpy() - 3) == 0)
def check_input_fallback(shape):
ones = mx.nd.ones(shape)
out = mx.nd.broadcast_add(ones.tostype('csr'), ones.tostype('row_sparse'))
assert(np.sum(out.asnumpy() - 2) == 0)
def check_fallback_with_temp_resource(shape):
ones = mx.nd.ones(shape)
out = mx.nd.sum(ones)
assert(out.asscalar() == np.prod(shape))
shape = rand_shape_2d()
check_output_fallback(shape)
check_input_fallback(shape)
check_fallback_with_temp_resource(shape)
@with_seed()
def test_sparse_nd_random():
""" test sparse random operator on cpu """
# gpu random operator doesn't use fixed seed
    if default_context().device_type == 'gpu':
return
shape = (100, 100)
fns = [mx.nd.random.uniform, mx.nd.random.normal, mx.nd.random.gamma]
for fn in fns:
rsp_out = mx.nd.zeros(shape=shape, stype='row_sparse')
dns_out = mx.nd.zeros(shape=shape, stype='default')
with random_seed(0):
fn(shape=shape, out=dns_out)
with random_seed(0):
fn(shape=shape, out=rsp_out)
assert_almost_equal(dns_out.asnumpy(), rsp_out.asnumpy())
@with_seed()
def test_sparse_nd_astype():
stypes = ['row_sparse', 'csr']
for stype in stypes:
x = mx.nd.zeros(shape=rand_shape_2d(), stype=stype, dtype='float32')
y = x.astype('int32')
assert(y.dtype == np.int32), y.dtype
@with_seed()
def test_sparse_nd_astype_copy():
stypes = ['row_sparse', 'csr']
for stype in stypes:
x = mx.nd.zeros(shape=rand_shape_2d(), stype=stype, dtype='int32')
y = x.astype('float32')
assert (y.dtype == np.float32)
# Test that a new ndarray has been allocated
assert (id(x) != id(y))
y = x.astype('float32', copy=False)
assert (y.dtype == np.float32)
# Test that a new ndarray has been allocated
assert (id(x) != id(y))
y = x.astype('int32')
assert (y.dtype == np.int32)
# Test that a new ndarray has been allocated
# even though they have same dtype
assert (id(x) != id(y))
# Test that a new ndarray has not been allocated
y = x.astype('int32', copy=False)
assert (id(x) == id(y))
# Test the string version 'int32'
# has the same behaviour as the np.int32
y = x.astype(np.int32, copy=False)
assert (id(x) == id(y))
@with_seed(0)
def test_sparse_nd_pickle():
repeat = 1
dim0 = 40
dim1 = 40
stypes = ['row_sparse', 'csr']
densities = [0, 0.5]
stype_dict = {'row_sparse': RowSparseNDArray, 'csr': CSRNDArray}
for _ in range(repeat):
shape = rand_shape_2d(dim0, dim1)
for stype in stypes:
for density in densities:
a, _ = rand_sparse_ndarray(shape, stype, density)
assert isinstance(a, stype_dict[stype])
data = pkl.dumps(a)
b = pkl.loads(data)
assert isinstance(b, stype_dict[stype])
assert same(a.asnumpy(), b.asnumpy())
@with_seed(0)
def test_sparse_nd_save_load():
repeat = 1
stypes = ['default', 'row_sparse', 'csr']
stype_dict = {'default': NDArray, 'row_sparse': RowSparseNDArray, 'csr': CSRNDArray}
num_data = 20
densities = [0, 0.5]
fname = 'tmp_list.bin'
for _ in range(repeat):
data_list1 = []
for i in range(num_data):
stype = stypes[np.random.randint(0, len(stypes))]
shape = rand_shape_2d(dim0=40, dim1=40)
density = densities[np.random.randint(0, len(densities))]
data_list1.append(rand_ndarray(shape, stype, density))
assert isinstance(data_list1[-1], stype_dict[stype])
mx.nd.save(fname, data_list1)
data_list2 = mx.nd.load(fname)
assert len(data_list1) == len(data_list2)
for x, y in zip(data_list1, data_list2):
assert same(x.asnumpy(), y.asnumpy())
data_map1 = {'ndarray xx %s' % i: x for i, x in enumerate(data_list1)}
mx.nd.save(fname, data_map1)
data_map2 = mx.nd.load(fname)
assert len(data_map1) == len(data_map2)
for k, x in data_map1.items():
y = data_map2[k]
assert same(x.asnumpy(), y.asnumpy())
os.remove(fname)
@with_seed()
def test_sparse_nd_unsupported():
nd = mx.nd.zeros((2,2), stype='row_sparse')
fn_slice = lambda x: x._slice(None, None)
fn_at = lambda x: x._at(None)
fn_reshape = lambda x: x.reshape(None)
fns = [fn_slice, fn_at, fn_reshape]
for fn in fns:
try:
fn(nd)
assert(False)
except:
pass
@with_seed()
def test_create_csr():
def check_create_csr_from_nd(shape, density, dtype):
matrix = rand_ndarray(shape, 'csr', density)
# create data array with provided dtype and ctx
data = mx.nd.array(matrix.data.asnumpy(), dtype=dtype)
indptr = matrix.indptr
indices = matrix.indices
csr_created = mx.nd.sparse.csr_matrix((data, indices, indptr), shape=shape)
assert csr_created.stype == 'csr'
assert same(csr_created.data.asnumpy(), data.asnumpy())
assert same(csr_created.indptr.asnumpy(), indptr.asnumpy())
assert same(csr_created.indices.asnumpy(), indices.asnumpy())
# verify csr matrix dtype and ctx is consistent from the ones provided
assert csr_created.dtype == dtype, (csr_created, dtype)
assert csr_created.data.dtype == dtype, (csr_created.data.dtype, dtype)
assert csr_created.context == Context.default_ctx, (csr_created.context, Context.default_ctx)
csr_copy = mx.nd.array(csr_created)
assert(same(csr_copy.asnumpy(), csr_created.asnumpy()))
def check_create_csr_from_coo(shape, density, dtype):
matrix = rand_ndarray(shape, 'csr', density)
sp_csr = matrix.asscipy()
sp_coo = sp_csr.tocoo()
csr_created = mx.nd.sparse.csr_matrix((sp_coo.data, (sp_coo.row, sp_coo.col)), shape=shape, dtype=dtype)
assert csr_created.stype == 'csr'
assert same(csr_created.data.asnumpy(), sp_csr.data)
assert same(csr_created.indptr.asnumpy(), sp_csr.indptr)
assert same(csr_created.indices.asnumpy(), sp_csr.indices)
csr_copy = mx.nd.array(csr_created)
assert(same(csr_copy.asnumpy(), csr_created.asnumpy()))
# verify csr matrix dtype and ctx is consistent
assert csr_created.dtype == dtype, (csr_created.dtype, dtype)
assert csr_created.data.dtype == dtype, (csr_created.data.dtype, dtype)
assert csr_created.context == Context.default_ctx, (csr_created.context, Context.default_ctx)
def check_create_csr_from_scipy(shape, density, f):
def assert_csr_almost_equal(nd, sp):
assert_almost_equal(nd.data.asnumpy(), sp.data)
assert_almost_equal(nd.indptr.asnumpy(), sp.indptr)
assert_almost_equal(nd.indices.asnumpy(), sp.indices)
sp_csr = nd.asscipy()
assert_almost_equal(sp_csr.data, sp.data)
assert_almost_equal(sp_csr.indptr, sp.indptr)
assert_almost_equal(sp_csr.indices, sp.indices)
assert(sp.dtype == sp_csr.dtype), (sp.dtype, sp_csr.dtype)
try:
import scipy.sparse as spsp
# random canonical csr
csr_sp = spsp.rand(shape[0], shape[1], density, format="csr")
csr_nd = f(csr_sp)
assert_csr_almost_equal(csr_nd, csr_sp)
# non-canonical csr which contains duplicates and unsorted indices
indptr = np.array([0, 2, 3, 7])
indices = np.array([0, 2, 2, 0, 1, 2, 1])
data = np.array([1, 2, 3, 4, 5, 6, 1])
non_canonical_csr = spsp.csr_matrix((data, indices, indptr), shape=(3, 3), dtype=csr_nd.dtype)
canonical_csr_nd = f(non_canonical_csr, dtype=csr_nd.dtype)
canonical_csr_sp = non_canonical_csr.copy()
canonical_csr_sp.sum_duplicates()
canonical_csr_sp.sort_indices()
assert_csr_almost_equal(canonical_csr_nd, canonical_csr_sp)
except ImportError:
print("Could not import scipy.sparse. Skipping unit tests for scipy csr creation")
dim0 = 20
dim1 = 20
densities = [0, 0.5]
dtype = np.float64
for density in densities:
shape = rand_shape_2d(dim0, dim1)
check_create_csr_from_nd(shape, density, dtype)
check_create_csr_from_coo(shape, density, dtype)
check_create_csr_from_scipy(shape, density, mx.nd.sparse.array)
check_create_csr_from_scipy(shape, density, mx.nd.array)
@with_seed()
def test_create_row_sparse():
dim0 = 50
dim1 = 50
densities = [0, 0.5, 1]
for density in densities:
shape = rand_shape_2d(dim0, dim1)
matrix = rand_ndarray(shape, 'row_sparse', density)
data = matrix.data
indices = matrix.indices
rsp_created = mx.nd.sparse.row_sparse_array((data, indices), shape=shape)
assert rsp_created.stype == 'row_sparse'
assert same(rsp_created.data.asnumpy(), data.asnumpy())
assert same(rsp_created.indices.asnumpy(), indices.asnumpy())
rsp_copy = mx.nd.array(rsp_created)
assert(same(rsp_copy.asnumpy(), rsp_created.asnumpy()))
# add this test since we added np.int32 and np.int64 to integer_types
if len(shape) == 2:
for np_int_type in (np.int32, np.int64):
shape = list(shape)
shape = [np_int_type(x) for x in shape]
arg1 = tuple(shape)
mx.nd.sparse.row_sparse_array(arg1, tuple(shape))
shape[0] += 1
assert_exception(mx.nd.sparse.row_sparse_array, ValueError, arg1, tuple(shape))
@with_seed()
def test_create_sparse_nd_infer_shape():
def check_create_csr_infer_shape(shape, density, dtype):
try:
matrix = rand_ndarray(shape, 'csr', density=density)
data = matrix.data
indptr = matrix.indptr
indices = matrix.indices
nd = mx.nd.sparse.csr_matrix((data, indices, indptr), dtype=dtype)
num_rows, num_cols = nd.shape
assert(num_rows == len(indptr) - 1)
assert(indices.shape[0] > 0), indices
assert(np.sum((num_cols <= indices).asnumpy()) == 0)
assert(nd.dtype == dtype), (nd.dtype, dtype)
# cannot infer on invalid shape
except ValueError:
pass
def check_create_rsp_infer_shape(shape, density, dtype):
try:
array = rand_ndarray(shape, 'row_sparse', density=density)
data = array.data
indices = array.indices
nd = mx.nd.sparse.row_sparse_array((data, indices), dtype=dtype)
inferred_shape = nd.shape
assert(inferred_shape[1:] == data.shape[1:])
assert(indices.ndim > 0)
assert(nd.dtype == dtype)
if indices.shape[0] > 0:
assert(np.sum((inferred_shape[0] <= indices).asnumpy()) == 0)
# cannot infer on invalid shape
except ValueError:
pass
dtype = np.int32
shape = rand_shape_2d()
shape_3d = rand_shape_3d()
densities = [0, 0.5, 1]
for density in densities:
check_create_csr_infer_shape(shape, density, dtype)
check_create_rsp_infer_shape(shape, density, dtype)
check_create_rsp_infer_shape(shape_3d, density, dtype)
@with_seed()
def test_create_sparse_nd_from_dense():
def check_create_from_dns(shape, f, dense_arr, dtype, default_dtype, ctx):
arr = f(dense_arr, dtype=dtype, ctx=ctx)
assert(same(arr.asnumpy(), np.ones(shape)))
assert(arr.dtype == dtype)
assert(arr.context == ctx)
# verify the default dtype inferred from dense arr
arr2 = f(dense_arr)
assert(arr2.dtype == default_dtype)
assert(arr2.context == Context.default_ctx)
shape = rand_shape_2d()
dtype = np.int32
src_dtype = np.float64
ctx = mx.cpu(1)
dense_arrs = [mx.nd.ones(shape, dtype=src_dtype), np.ones(shape, dtype=src_dtype), \
np.ones(shape, dtype=src_dtype).tolist()]
for f in [mx.nd.sparse.csr_matrix, mx.nd.sparse.row_sparse_array]:
for dense_arr in dense_arrs:
default_dtype = dense_arr.dtype if isinstance(dense_arr, (NDArray, np.ndarray)) \
else np.float32
check_create_from_dns(shape, f, dense_arr, dtype, default_dtype, ctx)
@with_seed()
def test_create_sparse_nd_from_sparse():
def check_create_from_sp(shape, f, sp_arr, dtype, src_dtype, ctx):
arr = f(sp_arr, dtype=dtype, ctx=ctx)
assert(same(arr.asnumpy(), np.ones(shape)))
assert(arr.dtype == dtype)
assert(arr.context == ctx)
# verify the default dtype inferred from dense arr
arr2 = f(sp_arr)
assert(arr2.dtype == src_dtype)
assert(arr2.context == Context.default_ctx)
shape = rand_shape_2d()
src_dtype = np.float64
dtype = np.int32
ctx = mx.cpu(1)
ones = mx.nd.ones(shape, dtype=src_dtype)
csr_arrs = [ones.tostype('csr')]
rsp_arrs = [ones.tostype('row_sparse')]
try:
import scipy.sparse as spsp
csr_sp = spsp.csr_matrix(np.ones(shape, dtype=src_dtype))
csr_arrs.append(csr_sp)
except ImportError:
print("Could not import scipy.sparse. Skipping unit tests for scipy csr creation")
f_csr = mx.nd.sparse.csr_matrix
f_rsp = mx.nd.sparse.row_sparse_array
for sp_arr in csr_arrs:
check_create_from_sp(shape, f_csr, sp_arr, dtype, src_dtype, ctx)
for sp_arr in rsp_arrs:
check_create_from_sp(shape, f_rsp, sp_arr, dtype, src_dtype, ctx)
@with_seed()
def test_create_sparse_nd_empty():
def check_empty(shape, stype):
arr = mx.nd.empty(shape, stype=stype)
assert(arr.stype == stype)
assert same(arr.asnumpy(), np.zeros(shape))
def check_csr_empty(shape, dtype, ctx):
arr = mx.nd.sparse.csr_matrix(shape, dtype=dtype, ctx=ctx)
assert(arr.stype == 'csr')
assert(arr.dtype == dtype)
assert(arr.context == ctx)
assert same(arr.asnumpy(), np.zeros(shape))
# check the default value for dtype and ctx
arr = mx.nd.sparse.csr_matrix(shape)
assert(arr.dtype == np.float32)
assert(arr.context == Context.default_ctx)
def check_rsp_empty(shape, dtype, ctx):
arr = mx.nd.sparse.row_sparse_array(shape, dtype=dtype, ctx=ctx)
assert(arr.stype == 'row_sparse')
assert(arr.dtype == dtype)
assert(arr.context == ctx)
assert same(arr.asnumpy(), np.zeros(shape))
# check the default value for dtype and ctx
arr = mx.nd.sparse.row_sparse_array(shape)
assert(arr.dtype == np.float32)
assert(arr.context == Context.default_ctx)
stypes = ['csr', 'row_sparse']
shape = rand_shape_2d()
shape_3d = rand_shape_3d()
dtype = np.int32
ctx = mx.cpu(1)
for stype in stypes:
check_empty(shape, stype)
check_csr_empty(shape, dtype, ctx)
check_rsp_empty(shape, dtype, ctx)
check_rsp_empty(shape_3d, dtype, ctx)
@with_seed()
def test_synthetic_dataset_generator():
def test_powerlaw_generator(csr_arr, final_row=1):
"""Test power law distribution
        Total elements: 320,000; number of non-zeros: 3,200.
        Every row has twice as many non-zero elements as the previous row.
        Since 2047 < 3200 < 4095, this holds up to about the 10th row"""
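        # Worked check of the bound quoted in the docstring (illustrative):
        # if row 0 holds one non-zero and every row doubles, rows 0..k hold
        # 2**(k+1) - 1 non-zeros in total, so rows 0..10 need 2047 (< 3200)
        # while rows 0..11 would need 4095 (> 3200); strict doubling is thus
        # only guaranteed up to about the 10th row, which is why the caller
        # passes final_row=9 for the 3200-non-zero array.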
indices = csr_arr.indices.asnumpy()
indptr = csr_arr.indptr.asnumpy()
for row in range(1, final_row + 1):
nextrow = row + 1
current_row_nnz = indices[indptr[row] - 1] + 1
next_row_nnz = indices[indptr[nextrow] - 1] + 1
assert next_row_nnz == 2 * current_row_nnz
# Test if density is preserved
csr_arr_cols, _ = rand_sparse_ndarray(shape=(32, 10000), stype="csr",
density=0.01, distribution="powerlaw")
csr_arr_small, _ = rand_sparse_ndarray(shape=(5, 5), stype="csr",
density=0.5, distribution="powerlaw")
csr_arr_big, _ = rand_sparse_ndarray(shape=(32, 1000000), stype="csr",
density=0.4, distribution="powerlaw")
csr_arr_square, _ = rand_sparse_ndarray(shape=(1600, 1600), stype="csr",
density=0.5, distribution="powerlaw")
assert len(csr_arr_cols.data) == 3200
test_powerlaw_generator(csr_arr_cols, final_row=9)
test_powerlaw_generator(csr_arr_small, final_row=1)
test_powerlaw_generator(csr_arr_big, final_row=4)
test_powerlaw_generator(csr_arr_square, final_row=6)
@with_seed()
def test_sparse_nd_fluent():
def check_fluent_regular(stype, func, kwargs, shape=(5, 17), equal_nan=False):
with mx.name.NameManager():
data = mx.nd.random_uniform(shape=shape, ctx=default_context()).tostype(stype)
regular = getattr(mx.ndarray, func)(data, **kwargs)
fluent = getattr(data, func)(**kwargs)
if isinstance(regular, list):
for r, f in zip(regular, fluent):
assert almost_equal(r.asnumpy(), f.asnumpy(), equal_nan=equal_nan)
else:
assert almost_equal(regular.asnumpy(), fluent.asnumpy(), equal_nan=equal_nan)
all_funcs = ['zeros_like', 'square', 'round', 'rint', 'fix', 'floor', 'ceil', 'trunc',
'abs', 'sign', 'sin', 'degrees', 'radians', 'expm1']
for func in all_funcs:
check_fluent_regular('csr', func, {})
check_fluent_regular('row_sparse', func, {})
all_funcs = ['arcsin', 'arctan', 'tan', 'sinh', 'tanh',
'arcsinh', 'arctanh', 'log1p', 'sqrt', 'relu']
for func in all_funcs:
check_fluent_regular('csr', func, {}, equal_nan=True)
check_fluent_regular('row_sparse', func, {}, equal_nan=True)
check_fluent_regular('csr', 'slice', {'begin': (2, 5), 'end': (4, 7)}, shape=(5, 17))
check_fluent_regular('row_sparse', 'clip', {'a_min': -0.25, 'a_max': 0.75})
for func in ['sum', 'mean', 'norm']:
check_fluent_regular('csr', func, {'axis': 0})
@with_seed()
def test_sparse_nd_exception():
""" test invalid sparse operator will throw a exception """
a = mx.nd.ones((2,2))
assertRaises(mx.base.MXNetError, mx.nd.sparse.retain, a, invalid_arg="garbage_value")
assertRaises(ValueError, mx.nd.sparse.csr_matrix, a, shape=(3,2))
assertRaises(ValueError, mx.nd.sparse.csr_matrix, (2,2), shape=(3,2))
assertRaises(ValueError, mx.nd.sparse.row_sparse_array, (2,2), shape=(3,2))
assertRaises(ValueError, mx.nd.sparse.zeros, "invalid_stype", (2,2))
@with_seed()
def test_sparse_nd_check_format():
""" test check_format for sparse ndarray """
shape = rand_shape_2d()
stypes = ["csr", "row_sparse"]
for stype in stypes:
arr, _ = rand_sparse_ndarray(shape, stype)
arr.check_format()
arr = mx.nd.sparse.zeros(stype, shape)
arr.check_format()
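    # For reference, an illustrative valid encoding: a 3x4 CSR matrix with
    # entries (0,0)=7, (1,2)=8 and (2,1)=9 is stored as
    #     data    = [7, 8, 9]
    #     indices = [0, 2, 1]
    #     indptr  = [0, 1, 2, 3]
    # Each of the invalid cases below breaks one of these invariants.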
    # CSR format index pointer values must be non-decreasing and bounded by the number of stored values
shape = (3, 4)
data_list = [7, 8, 9]
indices_list = [0, 2, 1]
indptr_list = [0, 5, 2, 3]
a = mx.nd.sparse.csr_matrix((data_list, indices_list, indptr_list), shape=shape)
assertRaises(mx.base.MXNetError, a.check_format)
# CSR format indices should be in ascending order per row
indices_list = [2, 1, 1]
indptr_list = [0, 2, 2, 3]
a = mx.nd.sparse.csr_matrix((data_list, indices_list, indptr_list), shape=shape)
assertRaises(mx.base.MXNetError, a.check_format)
    # CSR format indptr should end with a value equal to the size of indices
indices_list = [1, 2, 1]
indptr_list = [0, 2, 2, 4]
a = mx.nd.sparse.csr_matrix((data_list, indices_list, indptr_list), shape=shape)
assertRaises(mx.base.MXNetError, a.check_format)
# CSR format indices should not be negative
indices_list = [0, 2, 1]
indptr_list = [0, -2, 2, 3]
a = mx.nd.sparse.csr_matrix((data_list, indices_list, indptr_list), shape=shape)
assertRaises(mx.base.MXNetError, a.check_format)
# Row Sparse format indices should be less than the number of rows
shape = (3, 2)
data_list = [[1, 2], [3, 4]]
indices_list = [1, 4]
a = mx.nd.sparse.row_sparse_array((data_list, indices_list), shape=shape)
assertRaises(mx.base.MXNetError, a.check_format)
# Row Sparse format indices should be in ascending order
indices_list = [1, 0]
a = mx.nd.sparse.row_sparse_array((data_list, indices_list), shape=shape)
assertRaises(mx.base.MXNetError, a.check_format)
# Row Sparse format indices should not be negative
indices_list = [1, -2]
a = mx.nd.sparse.row_sparse_array((data_list, indices_list), shape=shape)
assertRaises(mx.base.MXNetError, a.check_format)
@with_seed()
def test_sparse_nd_norm():
def check_sparse_nd_norm(stype, shape, density, **kwargs):
data, _ = rand_sparse_ndarray(shape, stype, density)
norm = data.norm(**kwargs)
expected_norm = data.tostype('default').norm(**kwargs)
assert_almost_equal(norm.asnumpy(), expected_norm.asnumpy())
shape = (5, 5)
stypes = ['row_sparse', 'csr']
densities = [0, 0.5, 1]
for stype in stypes:
for density in densities:
check_sparse_nd_norm(stype, shape, density, axis=None, keepdims=False, ord=2)
# test fallback
check_sparse_nd_norm(stype, shape, density, axis=0, keepdims=False, ord=2)
check_sparse_nd_norm(stype, shape, density, axis=None, keepdims=True, ord=2)
@with_seed()
def test_sparse_fc():
def check_sparse_fc(batch_size, dim_in, dim_out, stype):
data = rand_ndarray((batch_size, dim_in), stype, density=0.5)
weight = rand_ndarray((dim_out, dim_in), 'row_sparse', density=1)
bias = rand_ndarray((dim_out, 1), 'row_sparse', density=1)
out = mx.nd.sparse.FullyConnected(data, weight, num_hidden=dim_out, bias=bias)
data_dns = data.tostype('default')
weight_dns = weight.tostype('default')
out_dns = mx.nd.FullyConnected(data_dns, weight_dns, num_hidden=dim_out, bias=bias)
assert_almost_equal(out.asnumpy(), out_dns.asnumpy())
# test FC with row_sparse weight w/ density=1, dense data
check_sparse_fc(5, 10, 8, 'default')
# test FC with row_sparse weight w/ density=1, csr data (fallback)
check_sparse_fc(5, 10, 8, 'csr')
if __name__ == '__main__':
import nose
nose.runmodule()
| apache-2.0 | -9,119,553,058,558,258,000 | 37.729702 | 112 | 0.590277 | false |
alexjj/photofloat | scanner/PhotoAlbum.py | 1 | 19553 | from CachePath import *
from datetime import datetime
import json
import os
import os.path
from PIL import Image
from PIL.ExifTags import TAGS
from multiprocessing import Pool
import gc
import tempfile
from VideoToolWrapper import *
def make_photo_thumbs(self, original_path, thumb_path, size):
	# The pool methods use a queue.Queue to pass tasks to the worker processes.
	# Everything that goes through the queue.Queue must be picklable, and since
	# self._photo_thumbnail is not defined at the top level, it's not picklable.
	# This is why we have this "dummy" function, so that it's picklable.
self._photo_thumbnail(original_path, thumb_path, size[0], size[1])
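# A minimal sketch of the constraint described above (hypothetical call sites):
#
#     pool = Pool(processes=2)
#     pool.apply_async(photo._photo_thumbnail, args=(...))    # bound method: not picklable on Python 2
#     pool.apply_async(make_photo_thumbs, args=(photo, ...))  # top-level function: picklable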
class Album(object):
def __init__(self, path):
self._path = trim_base(path)
self._photos = list()
self._albums = list()
self._photos_sorted = True
self._albums_sorted = True
@property
def photos(self):
return self._photos
@property
def albums(self):
return self._albums
@property
def path(self):
return self._path
def __str__(self):
return self.path
@property
def cache_path(self):
return json_cache(self.path)
@property
def date(self):
self._sort()
if len(self._photos) == 0 and len(self._albums) == 0:
return datetime(1900, 1, 1)
elif len(self._photos) == 0:
return self._albums[-1].date
elif len(self._albums) == 0:
return self._photos[-1].date
return max(self._photos[-1].date, self._albums[-1].date)
def __cmp__(self, other):
return cmp(self.date, other.date)
def add_photo(self, photo):
self._photos.append(photo)
self._photos_sorted = False
def add_album(self, album):
self._albums.append(album)
self._albums_sorted = False
def _sort(self):
if not self._photos_sorted:
self._photos.sort()
self._photos_sorted = True
if not self._albums_sorted:
self._albums.sort()
self._albums_sorted = True
@property
def empty(self):
if len(self._photos) != 0:
return False
if len(self._albums) == 0:
return True
for album in self._albums:
if not album.empty:
return False
return True
def cache(self, base_dir):
self._sort()
fp = open(os.path.join(base_dir, self.cache_path), 'w')
json.dump(self, fp, cls=PhotoAlbumEncoder)
fp.close()
@staticmethod
def from_cache(path):
fp = open(path, "r")
dictionary = json.load(fp)
fp.close()
return Album.from_dict(dictionary)
@staticmethod
def from_dict(dictionary, cripple=True):
album = Album(dictionary["path"])
for photo in dictionary["photos"]:
album.add_photo(Photo.from_dict(photo, untrim_base(album.path)))
if not cripple:
for subalbum in dictionary["albums"]:
album.add_album(Album.from_dict(subalbum), cripple)
album._sort()
return album
def to_dict(self, cripple=True):
self._sort()
subalbums = []
if cripple:
for sub in self._albums:
if not sub.empty:
subalbums.append({ "path": trim_base_custom(sub.path, self._path), "date": sub.date })
else:
for sub in self._albums:
if not sub.empty:
subalbums.append(sub)
return { "path": self.path, "date": self.date, "albums": subalbums, "photos": self._photos }
def photo_from_path(self, path):
for photo in self._photos:
if trim_base(path) == photo._path:
return photo
return None
class Photo(object):
thumb_sizes = [ (75, True), (150, True), (640, False), (1024, False), (1600, False) ]
def __init__(self, path, thumb_path=None, attributes=None):
self._path = trim_base(path)
self.is_valid = True
image = None
try:
mtime = file_mtime(path)
except KeyboardInterrupt:
raise
except:
self.is_valid = False
return
if attributes is not None and attributes["dateTimeFile"] >= mtime:
self._attributes = attributes
return
self._attributes = {}
self._attributes["dateTimeFile"] = mtime
self._attributes["mediaType"] = "photo"
try:
image = Image.open(path)
except KeyboardInterrupt:
raise
except:
self._video_metadata(path)
if isinstance(image, Image.Image):
self._photo_metadata(image)
self._photo_thumbnails(path, thumb_path)
elif self._attributes["mediaType"] == "video":
self._video_thumbnails(thumb_path, path)
self._video_transcode(thumb_path, path)
else:
self.is_valid = False
return
def _photo_metadata(self, image):
self._attributes["size"] = image.size
self._orientation = 1
try:
info = image._getexif()
except KeyboardInterrupt:
raise
except:
return
if not info:
return
exif = {}
for tag, value in info.items():
decoded = TAGS.get(tag, tag)
if isinstance(value, str) or isinstance(value, unicode):
value = value.strip().partition("\x00")[0]
if (isinstance(decoded, str) or isinstance(decoded, unicode)) and decoded.startswith("DateTime"):
try:
value = datetime.strptime(value, '%Y:%m:%d %H:%M:%S')
except KeyboardInterrupt:
raise
except:
continue
exif[decoded] = value
if "Orientation" in exif:
self._orientation = exif["Orientation"];
if self._orientation in range(5, 9):
self._attributes["size"] = (self._attributes["size"][1], self._attributes["size"][0])
if self._orientation - 1 < len(self._photo_metadata.orientation_list):
self._attributes["orientation"] = self._photo_metadata.orientation_list[self._orientation - 1]
if "Make" in exif:
self._attributes["make"] = exif["Make"]
if "Model" in exif:
self._attributes["model"] = exif["Model"]
if "ApertureValue" in exif:
self._attributes["aperture"] = exif["ApertureValue"]
elif "FNumber" in exif:
self._attributes["aperture"] = exif["FNumber"]
if "FocalLength" in exif:
self._attributes["focalLength"] = exif["FocalLength"]
if "ISOSpeedRatings" in exif:
self._attributes["iso"] = exif["ISOSpeedRatings"]
if "ISO" in exif:
self._attributes["iso"] = exif["ISO"]
if "PhotographicSensitivity" in exif:
self._attributes["iso"] = exif["PhotographicSensitivity"]
if "ExposureTime" in exif:
self._attributes["exposureTime"] = exif["ExposureTime"]
if "Flash" in exif and exif["Flash"] in self._photo_metadata.flash_dictionary:
try:
self._attributes["flash"] = self._photo_metadata.flash_dictionary[exif["Flash"]]
except KeyboardInterrupt:
raise
except:
pass
if "LightSource" in exif and exif["LightSource"] in self._photo_metadata.light_source_dictionary:
try:
self._attributes["lightSource"] = self._photo_metadata.light_source_dictionary[exif["LightSource"]]
except KeyboardInterrupt:
raise
except:
pass
if "ExposureProgram" in exif and exif["ExposureProgram"] < len(self._photo_metadata.exposure_list):
self._attributes["exposureProgram"] = self._photo_metadata.exposure_list[exif["ExposureProgram"]]
if "SpectralSensitivity" in exif:
self._attributes["spectralSensitivity"] = exif["SpectralSensitivity"]
if "MeteringMode" in exif and exif["MeteringMode"] < len(self._photo_metadata.metering_list):
self._attributes["meteringMode"] = self._photo_metadata.metering_list[exif["MeteringMode"]]
if "SensingMethod" in exif and exif["SensingMethod"] < len(self._photo_metadata.sensing_method_list):
self._attributes["sensingMethod"] = self._photo_metadata.sensing_method_list[exif["SensingMethod"]]
if "SceneCaptureType" in exif and exif["SceneCaptureType"] < len(self._photo_metadata.scene_capture_type_list):
self._attributes["sceneCaptureType"] = self._photo_metadata.scene_capture_type_list[exif["SceneCaptureType"]]
if "SubjectDistanceRange" in exif and exif["SubjectDistanceRange"] < len(self._photo_metadata.subject_distance_range_list):
self._attributes["subjectDistanceRange"] = self._photo_metadata.subject_distance_range_list[exif["SubjectDistanceRange"]]
if "ExposureCompensation" in exif:
self._attributes["exposureCompensation"] = exif["ExposureCompensation"]
if "ExposureBiasValue" in exif:
self._attributes["exposureCompensation"] = exif["ExposureBiasValue"]
if "DateTimeOriginal" in exif:
try:
self._attributes["dateTimeOriginal"] = datetime.strptime(exif["DateTimeOriginal"], '%Y:%m:%d %H:%M:%S')
except KeyboardInterrupt:
raise
except TypeError:
self._attributes["dateTimeOriginal"] = exif["DateTimeOriginal"]
if "DateTime" in exif:
try:
self._attributes["dateTime"] = datetime.strptime(exif["DateTime"], '%Y:%m:%d %H:%M:%S')
except KeyboardInterrupt:
raise
except TypeError:
self._attributes["dateTime"] = exif["DateTime"]
_photo_metadata.flash_dictionary = {0x0: "No Flash", 0x1: "Fired",0x5: "Fired, Return not detected",0x7: "Fired, Return detected",0x8: "On, Did not fire",0x9: "On, Fired",0xd: "On, Return not detected",0xf: "On, Return detected",0x10: "Off, Did not fire",0x14: "Off, Did not fire, Return not detected",0x18: "Auto, Did not fire",0x19: "Auto, Fired",0x1d: "Auto, Fired, Return not detected",0x1f: "Auto, Fired, Return detected",0x20: "No flash function",0x30: "Off, No flash function",0x41: "Fired, Red-eye reduction",0x45: "Fired, Red-eye reduction, Return not detected",0x47: "Fired, Red-eye reduction, Return detected",0x49: "On, Red-eye reduction",0x4d: "On, Red-eye reduction, Return not detected",0x4f: "On, Red-eye reduction, Return detected",0x50: "Off, Red-eye reduction",0x58: "Auto, Did not fire, Red-eye reduction",0x59: "Auto, Fired, Red-eye reduction",0x5d: "Auto, Fired, Red-eye reduction, Return not detected",0x5f: "Auto, Fired, Red-eye reduction, Return detected"}
_photo_metadata.light_source_dictionary = {0: "Unknown", 1: "Daylight", 2: "Fluorescent", 3: "Tungsten (incandescent light)", 4: "Flash", 9: "Fine weather", 10: "Cloudy weather", 11: "Shade", 12: "Daylight fluorescent (D 5700 - 7100K)", 13: "Day white fluorescent (N 4600 - 5400K)", 14: "Cool white fluorescent (W 3900 - 4500K)", 15: "White fluorescent (WW 3200 - 3700K)", 17: "Standard light A", 18: "Standard light B", 19: "Standard light C", 20: "D55", 21: "D65", 22: "D75", 23: "D50", 24: "ISO studio tungsten"}
_photo_metadata.metering_list = ["Unknown", "Average", "Center-weighted average", "Spot", "Multi-spot", "Multi-segment", "Partial"]
_photo_metadata.exposure_list = ["Not Defined", "Manual", "Program AE", "Aperture-priority AE", "Shutter speed priority AE", "Creative (Slow speed)", "Action (High speed)", "Portrait", "Landscape", "Bulb"]
_photo_metadata.orientation_list = ["Horizontal (normal)", "Mirror horizontal", "Rotate 180", "Mirror vertical", "Mirror horizontal and rotate 270 CW", "Rotate 90 CW", "Mirror horizontal and rotate 90 CW", "Rotate 270 CW"]
_photo_metadata.sensing_method_list = ["Not defined", "One-chip color area sensor", "Two-chip color area sensor", "Three-chip color area sensor", "Color sequential area sensor", "Trilinear sensor", "Color sequential linear sensor"]
_photo_metadata.scene_capture_type_list = ["Standard", "Landscape", "Portrait", "Night scene"]
_photo_metadata.subject_distance_range_list = ["Unknown", "Macro", "Close view", "Distant view"]
def _video_metadata(self, path, original=True):
p = VideoProbeWrapper().call('-show_format', '-show_streams', '-of', 'json', '-loglevel', '0', path)
if p == False:
self.is_valid = False
return
info = json.loads(p)
for s in info["streams"]:
if 'codec_type' in s and s['codec_type'] == 'video':
self._attributes["mediaType"] = "video"
self._attributes["size"] = (int(s["width"]), int(s["height"]))
if "duration" in s:
self._attributes["duration"] = s["duration"]
if "tags" in s and "rotate" in s["tags"]:
self._attributes["rotate"] = s["tags"]["rotate"]
if original:
self._attributes["originalSize"] = (int(s["width"]), int(s["height"]))
break
def _photo_thumbnail(self, original_path, thumb_path, size, square=False):
try:
image = Image.open(original_path)
except KeyboardInterrupt:
raise
except:
self.is_valid = False
return
mirror = image
if self._orientation == 2:
# Vertical Mirror
mirror = image.transpose(Image.FLIP_LEFT_RIGHT)
elif self._orientation == 3:
# Rotation 180
mirror = image.transpose(Image.ROTATE_180)
elif self._orientation == 4:
# Horizontal Mirror
mirror = image.transpose(Image.FLIP_TOP_BOTTOM)
elif self._orientation == 5:
# Horizontal Mirror + Rotation 270
mirror = image.transpose(Image.FLIP_TOP_BOTTOM).transpose(Image.ROTATE_270)
elif self._orientation == 6:
# Rotation 270
mirror = image.transpose(Image.ROTATE_270)
elif self._orientation == 7:
# Vertical Mirror + Rotation 270
mirror = image.transpose(Image.FLIP_LEFT_RIGHT).transpose(Image.ROTATE_270)
elif self._orientation == 8:
# Rotation 90
mirror = image.transpose(Image.ROTATE_90)
image = mirror
self._thumbnail(image, original_path, thumb_path, size, square)
def _thumbnail(self, image, original_path, thumb_path, size, square):
thumb_path = os.path.join(thumb_path, image_cache(self._path, size, square))
info_string = "%s -> %spx" % (os.path.basename(original_path), str(size))
if square:
info_string += ", square"
message("thumbing", info_string)
if os.path.exists(thumb_path) and file_mtime(thumb_path) >= self._attributes["dateTimeFile"]:
return
gc.collect()
try:
image = image.copy()
except KeyboardInterrupt:
raise
except:
try:
image = image.copy() # we try again to work around PIL bug
except KeyboardInterrupt:
raise
except:
message("corrupt image", os.path.basename(original_path))
self.is_valid = False
return
if square:
if image.size[0] > image.size[1]:
left = (image.size[0] - image.size[1]) / 2
top = 0
right = image.size[0] - ((image.size[0] - image.size[1]) / 2)
bottom = image.size[1]
else:
left = 0
top = (image.size[1] - image.size[0]) / 2
right = image.size[0]
bottom = image.size[1] - ((image.size[1] - image.size[0]) / 2)
image = image.crop((left, top, right, bottom))
gc.collect()
image.thumbnail((size, size), Image.ANTIALIAS)
try:
image.save(thumb_path, "JPEG", quality=88)
except KeyboardInterrupt:
try:
os.unlink(thumb_path)
except:
pass
raise
except:
message("save failure", os.path.basename(thumb_path))
try:
os.unlink(thumb_path)
except:
pass
def _photo_thumbnails(self, original_path, thumb_path):
# get number of cores on the system, and use all minus one
num_of_cores = os.sysconf('SC_NPROCESSORS_ONLN') - 1
pool = Pool(processes=num_of_cores)
for size in Photo.thumb_sizes:
pool.apply_async(make_photo_thumbs, args = (self, original_path, thumb_path, size))
pool.close()
pool.join()
def _video_thumbnails(self, thumb_path, original_path):
(tfd, tfn) = tempfile.mkstemp();
p = VideoTranscodeWrapper().call('-i', original_path, '-f', 'image2', '-vsync', '1', '-vframes', '1', '-an', '-loglevel', 'quiet', tfn)
if p == False:
message("couldn't extract video frame", os.path.basename(original_path))
os.unlink(tfn)
self.is_valid = False
return
try:
image = Image.open(tfn)
except KeyboardInterrupt:
raise
except:
message("couldn't open video thumbnail", tfn)
os.unlink(tfn)
self.is_valid = False
return
mirror = image
if "rotate" in self._attributes:
if self._attributes["rotate"] == "90":
mirror = image.transpose(Image.ROTATE_270)
elif self._attributes["rotate"] == "180":
mirror = image.transpose(Image.ROTATE_180)
elif self._attributes["rotate"] == "270":
mirror = image.transpose(Image.ROTATE_90)
for size in Photo.thumb_sizes:
if size[1]:
self._thumbnail(mirror, original_path, thumb_path, size[0], size[1])
os.unlink(tfn)
def _video_transcode(self, transcode_path, original_path):
transcode_path = os.path.join(transcode_path, video_cache(self._path))
# get number of cores on the system, and use all minus one
num_of_cores = os.sysconf('SC_NPROCESSORS_ONLN') - 1
transcode_cmd = ['-i', original_path, '-c:v', 'libvpx', '-crf', '10', '-b:v', '4M', '-c:a', 'libvorbis', '-f', 'webm', '-threads', str(num_of_cores), '-loglevel', '0', '-y']
filters = []
info_string = "%s -> webm" % (os.path.basename(original_path))
message("transcoding", info_string)
if os.path.exists(transcode_path) and file_mtime(transcode_path) >= self._attributes["dateTimeFile"]:
self._video_metadata(transcode_path, False)
return
if "originalSize" in self._attributes and self._attributes["originalSize"][1] > 720:
filters.append("scale='trunc(oh*a/2)*2:min(720\,iw)'")
if "rotate" in self._attributes:
if self._attributes["rotate"] == "90":
filters.append('transpose=1')
elif self._attributes["rotate"] == "180":
filters.append('vflip,hflip')
elif self._attributes["rotate"] == "270":
filters.append('transpose=2')
if len(filters):
transcode_cmd.append('-vf')
transcode_cmd.append(','.join(filters))
transcode_cmd.append(transcode_path)
p = VideoTranscodeWrapper().call(*transcode_cmd)
if p == False:
message("transcoding failure", os.path.basename(original_path))
try:
os.unlink(transcode_path)
except:
pass
self.is_valid = False
return
self._video_metadata(transcode_path, False)
@property
def name(self):
return os.path.basename(self._path)
def __str__(self):
return self.name
@property
def path(self):
return self._path
@property
def image_caches(self):
caches = []
if "mediaType" in self._attributes and self._attributes["mediaType"] == "video":
for size in Photo.thumb_sizes:
if size[1]:
caches.append(image_cache(self._path, size[0], size[1]))
caches.append(video_cache(self._path))
else:
caches = [image_cache(self._path, size[0], size[1]) for size in Photo.thumb_sizes]
return caches
@property
def date(self):
correct_date = None;
if not self.is_valid:
correct_date = datetime(1900, 1, 1)
if "dateTimeOriginal" in self._attributes:
correct_date = self._attributes["dateTimeOriginal"]
elif "dateTime" in self._attributes:
correct_date = self._attributes["dateTime"]
else:
correct_date = self._attributes["dateTimeFile"]
return correct_date
def __cmp__(self, other):
date_compare = cmp(self.date, other.date)
if date_compare == 0:
return cmp(self.name, other.name)
return date_compare
@property
def attributes(self):
return self._attributes
@staticmethod
def from_dict(dictionary, basepath):
del dictionary["date"]
path = os.path.join(basepath, dictionary["name"])
del dictionary["name"]
for key, value in dictionary.items():
if key.startswith("dateTime"):
try:
dictionary[key] = datetime.strptime(dictionary[key], "%a %b %d %H:%M:%S %Y")
except KeyboardInterrupt:
raise
except:
pass
return Photo(path, None, dictionary)
def to_dict(self):
photo = { "name": self.name, "date": self.date }
photo.update(self.attributes)
return photo
class PhotoAlbumEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime):
return obj.strftime("%a %b %d %H:%M:%S %Y")
if isinstance(obj, Album) or isinstance(obj, Photo):
return obj.to_dict()
return json.JSONEncoder.default(self, obj)
| gpl-2.0 | -2,343,875,836,871,158,000 | 37.872763 | 982 | 0.662098 | false |
rgommers/statsmodels | statsmodels/graphics/factorplots.py | 28 | 7596 | # -*- coding: utf-8 -*-
"""
Authors: Josef Perktold, Skipper Seabold, Denis A. Engemann
"""
from statsmodels.compat.python import get_function_name, iterkeys, lrange, zip, iteritems
import numpy as np
from statsmodels.graphics.plottools import rainbow
import statsmodels.graphics.utils as utils
def interaction_plot(x, trace, response, func=np.mean, ax=None, plottype='b',
xlabel=None, ylabel=None, colors=[], markers=[],
linestyles=[], legendloc='best', legendtitle=None,
**kwargs):
"""
Interaction plot for factor level statistics.
    Note: if categorical factors are supplied, levels will be internally
    recoded to integers. This ensures matplotlib compatibility.
    Uses pandas.DataFrame to calculate an `aggregate` statistic for each
    level of the factor or group given by `trace`.
Parameters
----------
x : array-like
The `x` factor levels constitute the x-axis. If a `pandas.Series` is
given its name will be used in `xlabel` if `xlabel` is None.
trace : array-like
The `trace` factor levels will be drawn as lines in the plot.
If `trace` is a `pandas.Series` its name will be used as the
`legendtitle` if `legendtitle` is None.
response : array-like
        The response or dependent variable. If a `pandas.Series` is given
its name will be used in `ylabel` if `ylabel` is None.
func : function
Anything accepted by `pandas.DataFrame.aggregate`. This is applied to
the response variable grouped by the trace levels.
plottype : str {'line', 'scatter', 'both'}, optional
The type of plot to return. Can be 'l', 's', or 'b'
ax : axes, optional
Matplotlib axes instance
xlabel : str, optional
Label to use for `x`. Default is 'X'. If `x` is a `pandas.Series` it
will use the series names.
ylabel : str, optional
Label to use for `response`. Default is 'func of response'. If
`response` is a `pandas.Series` it will use the series names.
colors : list, optional
If given, must have length == number of levels in trace.
linestyles : list, optional
If given, must have length == number of levels in trace.
markers : list, optional
        If given, must have length == number of levels in trace
kwargs
These will be passed to the plot command used either plot or scatter.
If you want to control the overall plotting options, use kwargs.
Returns
-------
fig : Figure
The figure given by `ax.figure` or a new instance.
Examples
--------
>>> import numpy as np
>>> np.random.seed(12345)
>>> weight = np.random.randint(1,4,size=60)
>>> duration = np.random.randint(1,3,size=60)
>>> days = np.log(np.random.randint(1,30, size=60))
>>> fig = interaction_plot(weight, duration, days,
... colors=['red','blue'], markers=['D','^'], ms=10)
>>> import matplotlib.pyplot as plt
>>> plt.show()
.. plot::
import numpy as np
from statsmodels.graphics.factorplots import interaction_plot
np.random.seed(12345)
weight = np.random.randint(1,4,size=60)
duration = np.random.randint(1,3,size=60)
days = np.log(np.random.randint(1,30, size=60))
fig = interaction_plot(weight, duration, days,
colors=['red','blue'], markers=['D','^'], ms=10)
import matplotlib.pyplot as plt
#plt.show()
"""
from pandas import DataFrame
fig, ax = utils.create_mpl_ax(ax)
response_name = ylabel or getattr(response, 'name', 'response')
ylabel = '%s of %s' % (get_function_name(func), response_name)
xlabel = xlabel or getattr(x, 'name', 'X')
legendtitle = legendtitle or getattr(trace, 'name', 'Trace')
ax.set_ylabel(ylabel)
ax.set_xlabel(xlabel)
x_values = x_levels = None
if isinstance(x[0], str):
x_levels = [l for l in np.unique(x)]
x_values = lrange(len(x_levels))
x = _recode(x, dict(zip(x_levels, x_values)))
data = DataFrame(dict(x=x, trace=trace, response=response))
plot_data = data.groupby(['trace', 'x']).aggregate(func).reset_index()
# return data
# check plot args
n_trace = len(plot_data['trace'].unique())
if linestyles:
try:
assert len(linestyles) == n_trace
except AssertionError as err:
raise ValueError("Must be a linestyle for each trace level")
else: # set a default
linestyles = ['-'] * n_trace
if markers:
try:
assert len(markers) == n_trace
except AssertionError as err:
raise ValueError("Must be a linestyle for each trace level")
else: # set a default
markers = ['.'] * n_trace
if colors:
try:
assert len(colors) == n_trace
except AssertionError as err:
raise ValueError("Must be a linestyle for each trace level")
else: # set a default
#TODO: how to get n_trace different colors?
colors = rainbow(n_trace)
if plottype == 'both' or plottype == 'b':
for i, (values, group) in enumerate(plot_data.groupby(['trace'])):
# trace label
label = str(group['trace'].values[0])
ax.plot(group['x'], group['response'], color=colors[i],
marker=markers[i], label=label,
linestyle=linestyles[i], **kwargs)
elif plottype == 'line' or plottype == 'l':
for i, (values, group) in enumerate(plot_data.groupby(['trace'])):
# trace label
label = str(group['trace'].values[0])
ax.plot(group['x'], group['response'], color=colors[i],
label=label, linestyle=linestyles[i], **kwargs)
elif plottype == 'scatter' or plottype == 's':
for i, (values, group) in enumerate(plot_data.groupby(['trace'])):
# trace label
label = str(group['trace'].values[0])
ax.scatter(group['x'], group['response'], color=colors[i],
label=label, marker=markers[i], **kwargs)
else:
raise ValueError("Plot type %s not understood" % plottype)
ax.legend(loc=legendloc, title=legendtitle)
ax.margins(.1)
if all([x_levels, x_values]):
ax.set_xticks(x_values)
ax.set_xticklabels(x_levels)
return fig
def _recode(x, levels):
""" Recode categorial data to int factor.
Parameters
----------
x : array-like
        Array-like object, supporting numpy array methods, of categorically
        coded data.
levels : dict
mapping of labels to integer-codings
Returns
-------
out : instance numpy.ndarray
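    Examples
    --------
    An illustrative call with made-up labels (hypothetical, for clarity only):
    >>> import numpy as np
    >>> _recode(np.array(['low', 'high', 'low']), {'low': 0, 'high': 1})
    array([0, 1, 0])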
"""
from pandas import Series
name = None
if isinstance(x, Series):
name = x.name
x = x.values
if x.dtype.type not in [np.str_, np.object_]:
        raise ValueError('This is not a categorical factor.'
' Array of str type required.')
elif not isinstance(levels, dict):
raise ValueError('This is not a valid value for levels.'
' Dict required.')
elif not (np.unique(x) == np.unique(list(iterkeys(levels)))).all():
raise ValueError('The levels do not match the array values.')
else:
out = np.empty(x.shape[0], dtype=np.int)
for level, coding in iteritems(levels):
out[x == level] = coding
if name:
out = Series(out)
out.name = name
return out
| bsd-3-clause | 512,731,838,078,959,400 | 34.830189 | 89 | 0.598078 | false |
cemoody/chainer | chainer/function_set.py | 5 | 4081 | import numpy
import warnings
from chainer import cuda
from chainer import link
class FunctionSet(link.Chain):
"""Set of links (as "parameterized functions").
FunctionSet is a subclass of :class:`~chainer.Chain`. Function
    registration is done just by adding an attribute to a :class:`FunctionSet`
    object.
.. deprecated:: v1.5
Use :class:`~chainer.Chain` instead.
.. note::
FunctionSet was used for manipulation of one or more parameterized
functions. The concept of parameterized function is gone, and it has
been replaced by :class:`~chainer.Link` and :class:`~chainer.Chain`.
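    For code that still builds a FunctionSet, migration is typically a
    drop-in constructor swap (a minimal sketch; the layer sizes below are
    illustrative only)::
        import chainer
        import chainer.links as L
        # deprecated style
        model = chainer.FunctionSet(l1=L.Linear(784, 100), l2=L.Linear(100, 10))
        # preferred style
        model = chainer.Chain(l1=L.Linear(784, 100), l2=L.Linear(100, 10))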
"""
def __init__(self, **links):
super(FunctionSet, self).__init__(**links)
warnings.warn('FunctionSet is deprecated. Use Chain instead.',
DeprecationWarning)
def __setattr__(self, key, value):
d = self.__dict__
if isinstance(value, link.Link):
# we cannot use add_link here since add_link calls setattr, and we
# should allow overwriting for backward compatibility
if value.name is not None:
raise ValueError(
'given link is already registered to another chain by name'
' %s' % value.name)
if key in d:
d[key].name = None
del d[key]
else:
d['_children'].append(key)
value.name = key
# deal with properties
prop = getattr(self.__class__, key, None)
if isinstance(prop, property) and prop.fset is not None:
prop.fset(self, value)
else:
super(FunctionSet, self).__setattr__(key, value)
def collect_parameters(self):
"""Returns a tuple of parameters and gradients.
Returns:
Tuple (pair) of two tuples. The first element is a tuple of
parameter arrays, and the second is a tuple of gradient arrays.
"""
msg = ("'collect_parameters' is deprecated. "
"You can pass FunctionSet itself to 'optimizer.setup'")
warnings.warn(msg, FutureWarning)
return self
def __getitem__(self, key):
"""Returns an attribute by name.
Args:
key (str): Name of the attribute.
Returns:
Attribute.
.. admonition:: Example
>>> model = FunctionSet(l1=L.Linear(10, 10), l2=L.Linear(10, 10))
>>> l1 = model['l1'] # equivalent to l1 = model.l1
"""
return getattr(self, key)
def copy_parameters_from(self, params):
"""Copies parameters from another source without reallocation.
Args:
params (Iterable): Iterable of parameter arrays.
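        A typical use is synchronizing two models that share the same link
        structure (a sketch; ``src_model`` and ``dst_model`` are hypothetical
        FunctionSet instances of identical shape)::
            dst_model.copy_parameters_from(src_model.parameters)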
"""
for dst, src in zip(self.parameters, params):
if isinstance(dst, numpy.ndarray):
if isinstance(src, numpy.ndarray):
numpy.copyto(dst, src)
else:
dst[:] = src.get()
elif isinstance(src, numpy.ndarray):
dst.set(src)
else:
cuda.copy(src, out=dst)
@property
def parameters(self):
"""Tuple of parameter arrays of all registered functions.
        The order of parameters is consistent with :meth:`gradients` property.
"""
return tuple(param.data for param in self.params())
@parameters.setter
def parameters(self, params):
assert len(params) == len([_ for _ in self.params()])
for dst, src in zip(self.params(), params):
dst.data = src
@property
def gradients(self):
"""Tuple of gradient arrays of all registered functions.
The order of gradients is consistent with :meth:`parameters` property.
"""
return tuple(param.grad for param in self.params())
@gradients.setter
def gradients(self, grads):
assert len(grads) == len([_ for _ in self.params()])
for dst, src in zip(self.params(), grads):
dst.grad = src
| mit | -3,009,841,584,834,285,000 | 30.882813 | 79 | 0.570693 | false |
LambdaCast/LambdaCast | portal/migrations/0004_auto__add_field_video_user.py | 1 | 9437 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Video.user'
db.add_column('portal_video', 'user',
self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.User'], unique=True, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Video.user'
db.delete_column('portal_video', 'user_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
},
'portal.channel': {
'Meta': {'object_name': 'Channel'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'contained_videos'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['portal.Video']"})
},
'portal.comment': {
'Meta': {'object_name': 'Comment'},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '1000'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'timecode': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Video']"})
},
'portal.video': {
'Meta': {'object_name': 'Video'},
'assemblyid': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'channel': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['portal.Channel']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'description': ('django.db.models.fields.TextField', [], {}),
'duration': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'encodingDone': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.IntegerField', [], {'max_length': '1'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'mp3Size': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'mp3URL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'mp4Size': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'mp4URL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'oggSize': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'oggURL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'originalFile': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
'protocolURL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'torrentURL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'videoThumbURL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'webmSize': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'webmURL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['portal'] | bsd-2-clause | -7,255,439,666,737,504,000 | 75.112903 | 203 | 0.546678 | false |
chaincoin/chaincoin | qa/rpc-tests/p2p-acceptblock.py | 2 | 11947 | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
from test_framework.blocktools import create_block, create_coinbase
'''
AcceptBlockTest -- test processing of unrequested blocks.
Since behavior differs when receiving unrequested blocks from whitelisted peers
versus non-whitelisted peers, this tests the behavior of both (effectively two
separate tests running in parallel).
Setup: two nodes, node0 and node1, not connected to each other. Node0 does not
whitelist localhost, but node1 does. They will each be on their own chain for
this test.
We have one NodeConn connection to each, test_node and white_node respectively.
The test:
1. Generate one block on each node, to leave IBD.
2. Mine a new block on each tip, and deliver to each node from node's peer.
The tip should advance.
3. Mine a block that forks the previous block, and deliver to each node from
corresponding peer.
Node0 should not process this block (just accept the header), because it is
unrequested and doesn't have more work than the tip.
Node1 should process because this is coming from a whitelisted peer.
4. Send another block that builds on the forking block.
Node0 should process this block but be stuck on the shorter chain, because
it's missing an intermediate block.
Node1 should reorg to this longer chain.
4b. Send 288 more blocks on the longer chain.
Node0 should process all but the last block (too far ahead in height).
Send all headers to Node1, and then send the last block in that chain.
Node1 should accept the block because it's coming from a whitelisted peer.
5. Send a duplicate of the block in #3 to Node0.
Node0 should not process the block because it is unrequested, and stay on
the shorter chain.
6. Send Node0 an inv for the height 3 block produced in #4 above.
   Node0 should figure out that it is missing the height 2 block and send a
   getdata for it.
7. Send Node0 the missing block again.
Node0 should process and the tip should advance.
'''
# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
# p2p messages to a node, generating the messages in the main testing logic.
class TestNode(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
def add_connection(self, conn):
self.connection = conn
# Track the last getdata message we receive (used in the test)
def on_getdata(self, conn, message):
self.last_getdata = message
# Spin until verack message is received from the node.
# We use this to signal that our test can begin. This
# is called from the testing thread, so it needs to acquire
# the global lock.
def wait_for_verack(self):
while True:
with mininode_lock:
if self.verack_received:
return
time.sleep(0.05)
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def on_pong(self, conn, message):
self.last_pong = message
# Sync up with the node after delivery of a block
def sync_with_ping(self, timeout=30):
self.connection.send_message(msg_ping(nonce=self.ping_counter))
received_pong = False
sleep_time = 0.05
while not received_pong and timeout > 0:
time.sleep(sleep_time)
timeout -= sleep_time
with mininode_lock:
if self.last_pong.nonce == self.ping_counter:
received_pong = True
self.ping_counter += 1
return received_pong
class AcceptBlockTest(BitcoinTestFramework):
def add_options(self, parser):
parser.add_option("--testbinary", dest="testbinary",
default=os.getenv("BITCOIND", "bitcoind"),
help="bitcoind binary to test")
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 2
def setup_network(self):
# Node0 will be used to test behavior of processing unrequested blocks
# from peers which are not whitelisted, while Node1 will be used for
# the whitelisted case.
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"],
binary=self.options.testbinary))
self.nodes.append(start_node(1, self.options.tmpdir,
["-debug", "-whitelist=127.0.0.1"],
binary=self.options.testbinary))
def run_test(self):
# Setup the p2p connections and start up the network thread.
test_node = TestNode() # connects to node0 (not whitelisted)
white_node = TestNode() # connects to node1 (whitelisted)
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], white_node))
test_node.add_connection(connections[0])
white_node.add_connection(connections[1])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
test_node.wait_for_verack()
white_node.wait_for_verack()
# 1. Have both nodes mine a block (leave IBD)
[ n.generate(1) for n in self.nodes ]
tips = [ int("0x" + n.getbestblockhash(), 0) for n in self.nodes ]
# 2. Send one block that builds on each tip.
# This should be accepted.
blocks_h2 = [] # the height 2 blocks on each node's chain
block_time = int(time.time()) + 1
for i in range(2):
blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time))
blocks_h2[i].solve()
block_time += 1
test_node.send_message(msg_block(blocks_h2[0]))
white_node.send_message(msg_block(blocks_h2[1]))
[ x.sync_with_ping() for x in [test_node, white_node] ]
assert_equal(self.nodes[0].getblockcount(), 2)
assert_equal(self.nodes[1].getblockcount(), 2)
print("First height 2 block accepted by both nodes")
# 3. Send another block that builds on the original tip.
blocks_h2f = [] # Blocks at height 2 that fork off the main chain
for i in range(2):
blocks_h2f.append(create_block(tips[i], create_coinbase(2), blocks_h2[i].nTime+1))
blocks_h2f[i].solve()
test_node.send_message(msg_block(blocks_h2f[0]))
white_node.send_message(msg_block(blocks_h2f[1]))
[ x.sync_with_ping() for x in [test_node, white_node] ]
for x in self.nodes[0].getchaintips():
if x['hash'] == blocks_h2f[0].hash:
assert_equal(x['status'], "headers-only")
for x in self.nodes[1].getchaintips():
if x['hash'] == blocks_h2f[1].hash:
assert_equal(x['status'], "valid-headers")
print("Second height 2 block accepted only from whitelisted peer")
# 4. Now send another block that builds on the forking chain.
blocks_h3 = []
for i in range(2):
blocks_h3.append(create_block(blocks_h2f[i].sha256, create_coinbase(3), blocks_h2f[i].nTime+1))
blocks_h3[i].solve()
test_node.send_message(msg_block(blocks_h3[0]))
white_node.send_message(msg_block(blocks_h3[1]))
[ x.sync_with_ping() for x in [test_node, white_node] ]
# Since the earlier block was not processed by node0, the new block
# can't be fully validated.
for x in self.nodes[0].getchaintips():
if x['hash'] == blocks_h3[0].hash:
assert_equal(x['status'], "headers-only")
# But this block should be accepted by node0 since it has more work.
self.nodes[0].getblock(blocks_h3[0].hash)
print("Unrequested more-work block accepted from non-whitelisted peer")
# Node1 should have accepted and reorged.
assert_equal(self.nodes[1].getblockcount(), 3)
print("Successfully reorged to length 3 chain from whitelisted peer")
# 4b. Now mine 288 more blocks and deliver; all should be processed but
# the last (height-too-high) on node0. Node1 should process the tip if
# we give it the headers chain leading to the tip.
tips = blocks_h3
headers_message = msg_headers()
all_blocks = [] # node0's blocks
for j in range(2):
for i in range(288):
next_block = create_block(tips[j].sha256, create_coinbase(i + 4), tips[j].nTime+1)
next_block.solve()
if j==0:
test_node.send_message(msg_block(next_block))
all_blocks.append(next_block)
else:
headers_message.headers.append(CBlockHeader(next_block))
tips[j] = next_block
time.sleep(2)
# Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead
for x in all_blocks[:-1]:
self.nodes[0].getblock(x.hash)
assert_raises_jsonrpc(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[-1].hash)
headers_message.headers.pop() # Ensure the last block is unrequested
white_node.send_message(headers_message) # Send headers leading to tip
white_node.send_message(msg_block(tips[1])) # Now deliver the tip
white_node.sync_with_ping()
self.nodes[1].getblock(tips[1].hash)
print("Unrequested block far ahead of tip accepted from whitelisted peer")
# 5. Test handling of unrequested block on the node that didn't process
# Should still not be processed (even though it has a child that has more
# work).
test_node.send_message(msg_block(blocks_h2f[0]))
# Here, if the sleep is too short, the test could falsely succeed (if the
# node hasn't processed the block by the time the sleep returns, and then
# the node processes it and incorrectly advances the tip).
# But this would be caught later on, when we verify that an inv triggers
# a getdata request for this block.
test_node.sync_with_ping()
assert_equal(self.nodes[0].getblockcount(), 2)
print("Unrequested block that would complete more-work chain was ignored")
# 6. Try to get node to request the missing block.
# Poke the node with an inv for block at height 3 and see if that
# triggers a getdata on block 2 (it should if block 2 is missing).
with mininode_lock:
# Clear state so we can check the getdata request
test_node.last_getdata = None
test_node.send_message(msg_inv([CInv(2, blocks_h3[0].sha256)]))
test_node.sync_with_ping()
with mininode_lock:
getdata = test_node.last_getdata
# Check that the getdata includes the right block
assert_equal(getdata.inv[0].hash, blocks_h2f[0].sha256)
print("Inv at tip triggered getdata for unprocessed block")
# 7. Send the missing block for the third time (now it is requested)
test_node.send_message(msg_block(blocks_h2f[0]))
test_node.sync_with_ping()
assert_equal(self.nodes[0].getblockcount(), 290)
print("Successfully reorged to longer chain from non-whitelisted peer")
[ c.disconnect_node() for c in connections ]
if __name__ == '__main__':
AcceptBlockTest().main()
| mit | -325,994,662,659,658,430 | 41.820789 | 107 | 0.63924 | false |
baylee/django | tests/gis_tests/rasterapp/test_rasterfield.py | 8 | 13831 | import json
from django.contrib.gis.db.models.lookups import (
DistanceLookupBase, gis_lookups,
)
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.geos import GEOSGeometry
from django.contrib.gis.measure import D
from django.contrib.gis.shortcuts import numpy
from django.core.exceptions import ImproperlyConfigured
from django.db.models import Q
from django.test import (
TestCase, TransactionTestCase, mock, skipUnlessDBFeature,
)
from ..data.rasters.textrasters import JSON_RASTER
from ..models import models
from .models import RasterModel, RasterRelatedModel
if HAS_GDAL:
from django.contrib.gis.gdal import GDALRaster
@skipUnlessDBFeature('supports_raster')
class RasterFieldTest(TransactionTestCase):
available_apps = ['gis_tests.rasterapp']
def setUp(self):
rast = GDALRaster({
"srid": 4326,
"origin": [0, 0],
"scale": [-1, 1],
"skew": [0, 0],
"width": 5,
"height": 5,
"nr_of_bands": 2,
"bands": [{"data": range(25)}, {"data": range(25, 50)}],
})
model_instance = RasterModel.objects.create(
rast=rast,
rastprojected=rast,
geom="POINT (-95.37040 29.70486)",
)
RasterRelatedModel.objects.create(rastermodel=model_instance)
def test_field_null_value(self):
"""
Test creating a model where the RasterField has a null value.
"""
r = RasterModel.objects.create(rast=None)
r.refresh_from_db()
self.assertIsNone(r.rast)
def test_access_band_data_directly_from_queryset(self):
RasterModel.objects.create(rast=JSON_RASTER)
qs = RasterModel.objects.all()
qs[0].rast.bands[0].data()
def test_model_creation(self):
"""
Test RasterField through a test model.
"""
# Create model instance from JSON raster
r = RasterModel.objects.create(rast=JSON_RASTER)
r.refresh_from_db()
# Test raster metadata properties
self.assertEqual((5, 5), (r.rast.width, r.rast.height))
self.assertEqual([0.0, -1.0, 0.0, 0.0, 0.0, 1.0], r.rast.geotransform)
self.assertIsNone(r.rast.bands[0].nodata_value)
# Compare srs
self.assertEqual(r.rast.srs.srid, 4326)
# Compare pixel values
band = r.rast.bands[0].data()
# If numpy, convert result to list
if numpy:
band = band.flatten().tolist()
# Loop through rows in band data and assert single
# value is as expected.
self.assertEqual(
[
0.0, 1.0, 2.0, 3.0, 4.0,
5.0, 6.0, 7.0, 8.0, 9.0,
10.0, 11.0, 12.0, 13.0, 14.0,
15.0, 16.0, 17.0, 18.0, 19.0,
20.0, 21.0, 22.0, 23.0, 24.0
],
band
)
def test_implicit_raster_transformation(self):
"""
Test automatic transformation of rasters with srid different from the
field srid.
"""
# Parse json raster
rast = json.loads(JSON_RASTER)
# Update srid to another value
rast['srid'] = 3086
# Save model and get it from db
r = RasterModel.objects.create(rast=rast)
r.refresh_from_db()
# Confirm raster has been transformed to the default srid
self.assertEqual(r.rast.srs.srid, 4326)
# Confirm geotransform is in lat/lon
self.assertEqual(
r.rast.geotransform,
[-87.9298551266551, 9.459646421449934e-06, 0.0,
23.94249275457565, 0.0, -9.459646421449934e-06]
)
def test_verbose_name_arg(self):
"""
RasterField should accept a positional verbose name argument.
"""
self.assertEqual(
RasterModel._meta.get_field('rast').verbose_name,
'A Verbose Raster Name'
)
def test_all_gis_lookups_with_rasters(self):
"""
Evaluate all possible lookups for all input combinations (i.e.
raster-raster, raster-geom, geom-raster) and for projected and
unprojected coordinate systems. This test just checks that the lookup
can be called, but doesn't check if the result makes logical sense.
"""
from django.contrib.gis.db.backends.postgis.operations import PostGISOperations
# Create test raster and geom.
rast = GDALRaster(json.loads(JSON_RASTER))
stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)
stx_pnt.transform(3086)
# Loop through all the GIS lookups.
for name, lookup in gis_lookups.items():
# Construct lookup filter strings.
combo_keys = [
field + name for field in [
'rast__', 'rast__', 'rastprojected__0__', 'rast__',
'rastprojected__', 'geom__', 'rast__',
]
]
if issubclass(lookup, DistanceLookupBase):
# Set lookup values for distance lookups.
combo_values = [
(rast, 50, 'spheroid'),
(rast, 0, 50, 'spheroid'),
(rast, 0, D(km=1)),
(stx_pnt, 0, 500),
(stx_pnt, D(km=1000)),
(rast, 500),
(json.loads(JSON_RASTER), 500),
]
elif name == 'relate':
# Set lookup values for the relate lookup.
combo_values = [
(rast, 'T*T***FF*'),
(rast, 0, 'T*T***FF*'),
(rast, 0, 'T*T***FF*'),
(stx_pnt, 0, 'T*T***FF*'),
(stx_pnt, 'T*T***FF*'),
(rast, 'T*T***FF*'),
(json.loads(JSON_RASTER), 'T*T***FF*'),
]
elif name == 'isvalid':
# The isvalid lookup doesn't make sense for rasters.
continue
elif PostGISOperations.gis_operators[name].func:
# Set lookup values for all function based operators.
combo_values = [
rast, (rast, 0), (rast, 0), (stx_pnt, 0), stx_pnt,
rast, rast, json.loads(JSON_RASTER)
]
else:
# Override band lookup for these, as it's not supported.
combo_keys[2] = 'rastprojected__' + name
# Set lookup values for all other operators.
combo_values = [rast, rast, rast, stx_pnt, stx_pnt, rast, rast, json.loads(JSON_RASTER)]
# Create query filter combinations.
combos = [{x[0]: x[1]} for x in zip(combo_keys, combo_values)]
for combo in combos:
# Apply this query filter.
qs = RasterModel.objects.filter(**combo)
# Evaluate normal filter qs.
self.assertTrue(qs.count() in [0, 1])
# Evaluate on conditional Q expressions.
qs = RasterModel.objects.filter(Q(**combos[0]) & Q(**combos[1]))
self.assertTrue(qs.count() in [0, 1])
def test_dwithin_gis_lookup_ouptut_with_rasters(self):
"""
Check the logical functionality of the dwithin lookup for different
input parameters.
"""
# Create test raster and geom.
rast = GDALRaster(json.loads(JSON_RASTER))
stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)
stx_pnt.transform(3086)
# Filter raster with different lookup raster formats.
qs = RasterModel.objects.filter(rastprojected__dwithin=(rast, D(km=1)))
self.assertEqual(qs.count(), 1)
qs = RasterModel.objects.filter(rastprojected__dwithin=(json.loads(JSON_RASTER), D(km=1)))
self.assertEqual(qs.count(), 1)
qs = RasterModel.objects.filter(rastprojected__dwithin=(JSON_RASTER, D(km=1)))
self.assertEqual(qs.count(), 1)
# Filter in an unprojected coordinate system.
qs = RasterModel.objects.filter(rast__dwithin=(rast, 40))
self.assertEqual(qs.count(), 1)
# Filter with band index transform.
qs = RasterModel.objects.filter(rast__1__dwithin=(rast, 1, 40))
self.assertEqual(qs.count(), 1)
qs = RasterModel.objects.filter(rast__1__dwithin=(rast, 40))
self.assertEqual(qs.count(), 1)
qs = RasterModel.objects.filter(rast__dwithin=(rast, 1, 40))
self.assertEqual(qs.count(), 1)
# Filter raster by geom.
qs = RasterModel.objects.filter(rast__dwithin=(stx_pnt, 500))
self.assertEqual(qs.count(), 1)
qs = RasterModel.objects.filter(rastprojected__dwithin=(stx_pnt, D(km=10000)))
self.assertEqual(qs.count(), 1)
qs = RasterModel.objects.filter(rast__dwithin=(stx_pnt, 5))
self.assertEqual(qs.count(), 0)
qs = RasterModel.objects.filter(rastprojected__dwithin=(stx_pnt, D(km=100)))
self.assertEqual(qs.count(), 0)
# Filter geom by raster.
qs = RasterModel.objects.filter(geom__dwithin=(rast, 500))
self.assertEqual(qs.count(), 1)
# Filter through related model.
qs = RasterRelatedModel.objects.filter(rastermodel__rast__dwithin=(rast, 40))
self.assertEqual(qs.count(), 1)
# Filter through related model with band index transform
qs = RasterRelatedModel.objects.filter(rastermodel__rast__1__dwithin=(rast, 40))
self.assertEqual(qs.count(), 1)
# Filter through conditional statements.
qs = RasterModel.objects.filter(Q(rast__dwithin=(rast, 40)) & Q(rastprojected__dwithin=(stx_pnt, D(km=10000))))
self.assertEqual(qs.count(), 1)
# Filter through different lookup.
qs = RasterModel.objects.filter(rastprojected__bbcontains=rast)
self.assertEqual(qs.count(), 1)
def test_lookup_input_tuple_too_long(self):
rast = GDALRaster(json.loads(JSON_RASTER))
qs = RasterModel.objects.filter(rast__bbcontains=(rast, 1, 2))
msg = 'Tuple too long for lookup bbcontains.'
with self.assertRaisesMessage(ValueError, msg):
qs.count()
def test_lookup_input_band_not_allowed(self):
rast = GDALRaster(json.loads(JSON_RASTER))
qs = RasterModel.objects.filter(rast__bbcontains=(rast, 1))
msg = 'Band indices are not allowed for this operator, it works on bbox only.'
with self.assertRaisesMessage(ValueError, msg):
qs.count()
def test_isvalid_lookup_with_raster_error(self):
qs = RasterModel.objects.filter(rast__isvalid=True)
msg = 'The isvalid lookup is only available on geometry fields.'
with self.assertRaisesMessage(ValueError, msg):
qs.count()
def test_result_of_gis_lookup_with_rasters(self):
# Point is in the interior
qs = RasterModel.objects.filter(rast__contains=GEOSGeometry('POINT (-0.5 0.5)', 4326))
self.assertEqual(qs.count(), 1)
# Point is in the exterior
qs = RasterModel.objects.filter(rast__contains=GEOSGeometry('POINT (0.5 0.5)', 4326))
self.assertEqual(qs.count(), 0)
# A point on the boundary is not contained properly
qs = RasterModel.objects.filter(rast__contains_properly=GEOSGeometry('POINT (0 0)', 4326))
self.assertEqual(qs.count(), 0)
# Raster is located left of the point
qs = RasterModel.objects.filter(rast__left=GEOSGeometry('POINT (1 0)', 4326))
self.assertEqual(qs.count(), 1)
def test_lookup_with_raster_bbox(self):
rast = GDALRaster(json.loads(JSON_RASTER))
# Shift raster upwards
rast.origin.y = 2
# The raster in the model is not strictly below
qs = RasterModel.objects.filter(rast__strictly_below=rast)
self.assertEqual(qs.count(), 0)
# Shift raster further upwards
rast.origin.y = 6
# The raster in the model is strictly below
qs = RasterModel.objects.filter(rast__strictly_below=rast)
self.assertEqual(qs.count(), 1)
def test_lookup_with_polygonized_raster(self):
rast = GDALRaster(json.loads(JSON_RASTER))
# Move raster to overlap with the model point on the left side
rast.origin.x = -95.37040 + 1
rast.origin.y = 29.70486
# Raster overlaps with point in model
qs = RasterModel.objects.filter(geom__intersects=rast)
self.assertEqual(qs.count(), 1)
# Change left side of raster to be nodata values
rast.bands[0].data(data=[0, 0, 0, 1, 1], shape=(5, 1))
rast.bands[0].nodata_value = 0
qs = RasterModel.objects.filter(geom__intersects=rast)
# Raster does not overlap anymore after polygonization
# where the nodata zone is not included.
self.assertEqual(qs.count(), 0)
def test_lookup_value_error(self):
# Test with invalid dict lookup parameter
obj = dict()
msg = "Couldn't create spatial object from lookup value '%s'." % obj
with self.assertRaisesMessage(ValueError, msg):
RasterModel.objects.filter(geom__intersects=obj)
# Test with invalid string lookup parameter
obj = '00000'
msg = "Couldn't create spatial object from lookup value '%s'." % obj
with self.assertRaisesMessage(ValueError, msg):
RasterModel.objects.filter(geom__intersects=obj)
@mock.patch('django.contrib.gis.db.models.fields.HAS_GDAL', False)
class RasterFieldWithoutGDALTest(TestCase):
def test_raster_field_without_gdal_exception(self):
msg = 'RasterField requires GDAL.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
models.OriginalRasterField()
| bsd-3-clause | 7,405,782,654,818,391,000 | 39.560117 | 119 | 0.59251 | false |