repo_name | path | copies | size | content | license
---|---|---|---|---|---|
DDEFISHER/servo | tests/wpt/web-platform-tests/tools/wptserve/wptserve/request.py | 136 | 16506 |
import base64
import cgi
import Cookie
import os
import StringIO
import tempfile
import urlparse
import stash
from utils import HTTPException
missing = object()
class Server(object):
"""Data about the server environment
.. attribute:: config
Environment configuration information with information about the
various servers running, their hostnames and ports.
.. attribute:: stash
Stash object holding state stored on the server between requests.
"""
config = None
def __init__(self, request):
self._stash = None
self._request = request
@property
def stash(self):
if self._stash is None:
address, authkey = stash.load_env_config()
self._stash = stash.Stash(self._request.url_parts.path, address, authkey)
return self._stash
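# Schematic use of the lazily-created stash (the put/take API is assumed
# from wptserve's stash module; not executed here):
#
#   server = Server(request)
#   server.stash.put(key, value)    # persist state between requests
#   value = server.stash.take(key)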
class InputFile(object):
max_buffer_size = 1024*1024
def __init__(self, rfile, length):
"""File-like object used to provide a seekable view of request body data"""
self._file = rfile
self.length = length
self._file_position = 0
if length > self.max_buffer_size:
self._buf = tempfile.TemporaryFile(mode="w+b")
else:
self._buf = StringIO.StringIO()
@property
def _buf_position(self):
rv = self._buf.tell()
assert rv <= self._file_position
return rv
def read(self, bytes=-1):
assert self._buf_position <= self._file_position
if bytes < 0:
bytes = self.length - self._buf_position
bytes_remaining = min(bytes, self.length - self._buf_position)
if bytes_remaining == 0:
return ""
if self._buf_position != self._file_position:
buf_bytes = min(bytes_remaining, self._file_position - self._buf_position)
old_data = self._buf.read(buf_bytes)
bytes_remaining -= buf_bytes
else:
old_data = ""
assert self._buf_position == self._file_position, (
"Before reading buffer position (%i) didn't match file position (%i)" %
(self._buf_position, self._file_position))
new_data = self._file.read(bytes_remaining)
self._buf.write(new_data)
self._file_position += bytes_remaining
assert self._buf_position == self._file_position, (
"After reading buffer position (%i) didn't match file position (%i)" %
(self._buf_position, self._file_position))
return old_data + new_data
def tell(self):
return self._buf_position
def seek(self, offset):
if offset > self.length or offset < 0:
raise ValueError("seek offset out of range")
if offset <= self._file_position:
self._buf.seek(offset)
else:
self.read(offset - self._file_position)
def readline(self, max_bytes=None):
if max_bytes is None:
max_bytes = self.length - self._buf_position
if self._buf_position < self._file_position:
data = self._buf.readline(max_bytes)
if data.endswith("\n") or len(data) == max_bytes:
return data
else:
data = ""
assert self._buf_position == self._file_position
initial_position = self._file_position
found = False
buf = []
max_bytes -= len(data)
while not found:
readahead = self.read(min(2, max_bytes))
max_bytes -= len(readahead)
for i, c in enumerate(readahead):
if c == "\n":
buf.append(readahead[:i+1])
found = True
break
if not found:
buf.append(readahead)
if not readahead or not max_bytes:
break
new_data = "".join(buf)
data += new_data
self.seek(initial_position + len(new_data))
return data
def readlines(self):
rv = []
while True:
data = self.readline()
if data:
rv.append(data)
else:
break
return rv
def next(self):
data = self.readline()
if data:
return data
else:
raise StopIteration
def __iter__(self):
return self
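# A minimal sketch of the buffering behaviour (illustrative only; an
# in-memory StringIO stands in for the socket file the handler normally
# supplies as rfile):
#
#   >>> f = InputFile(StringIO.StringIO("spam=eggs"), 9)
#   >>> f.read(4)
#   'spam'
#   >>> f.seek(0)          # rewinding replays the buffered bytes
#   >>> f.read()
#   'spam=eggs'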
class Request(object):
"""Object representing a HTTP request.
.. attribute:: doc_root
The local directory to use as a base when resolving paths
.. attribute:: route_match
Regexp match object from matching the request path to the route
selected for the request.
.. attribute:: protocol_version
HTTP version specified in the request.
.. attribute:: method
HTTP method in the request.
.. attribute:: request_path
Request path as it appears in the HTTP request.
.. attribute:: url_base
The prefix part of the path; typically / unless the handler has a url_base set
.. attribute:: url
Absolute URL for the request.
.. attribute:: headers
RequestHeaders object providing a dictionary-like representation of
the request headers.
.. attribute:: raw_input
File-like object representing the body of the request.
.. attribute:: url_parts
Parts of the requested URL as obtained by urlparse.urlsplit(path)
.. attribute:: request_line
Raw request line
.. attribute:: body
Request body as a string
.. attribute:: GET
MultiDict representing the parameters supplied with the request.
Note that these may be present on non-GET requests; the name is
chosen to be familiar to users of other systems such as PHP.
.. attribute:: POST
MultiDict representing the request body parameters. Most parameters
are present as string values, but file uploads have file-like
values.
.. attribute:: cookies
Cookies object representing cookies sent with the request with a
dictionary-like interface.
.. attribute:: auth
Object with username and password properties representing any
credentials supplied using HTTP authentication.
.. attribute:: server
Server object containing information about the server environment.
"""
def __init__(self, request_handler):
self.doc_root = request_handler.server.router.doc_root
self.route_match = None # Set by the router
self.protocol_version = request_handler.protocol_version
self.method = request_handler.command
scheme = request_handler.server.scheme
host = request_handler.headers.get("Host")
port = request_handler.server.server_address[1]
if host is None:
host = request_handler.server.server_address[0]
else:
if ":" in host:
host, port = host.split(":", 1)
self.request_path = request_handler.path
self.url_base = "/"
if self.request_path.startswith(scheme + "://"):
self.url = request_handler.path
else:
self.url = "%s://%s:%s%s" % (scheme,
host,
port,
self.request_path)
self.url_parts = urlparse.urlsplit(self.url)
self._raw_headers = request_handler.headers
self.request_line = request_handler.raw_requestline
self._headers = None
self.raw_input = InputFile(request_handler.rfile,
int(self.headers.get("Content-Length", 0)))
self._body = None
self._GET = None
self._POST = None
self._cookies = None
self._auth = None
self.server = Server(self)
def __repr__(self):
return "<Request %s %s>" % (self.method, self.url)
@property
def GET(self):
if self._GET is None:
params = urlparse.parse_qsl(self.url_parts.query, keep_blank_values=True)
self._GET = MultiDict()
for key, value in params:
self._GET.add(key, value)
return self._GET
@property
def POST(self):
if self._POST is None:
#Work out the post parameters
pos = self.raw_input.tell()
self.raw_input.seek(0)
fs = cgi.FieldStorage(fp=self.raw_input,
environ={"REQUEST_METHOD": self.method},
headers=self.headers,
keep_blank_values=True)
self._POST = MultiDict.from_field_storage(fs)
self.raw_input.seek(pos)
return self._POST
@property
def cookies(self):
if self._cookies is None:
parser = Cookie.BaseCookie()
cookie_headers = self.headers.get("cookie", "")
parser.load(cookie_headers)
cookies = Cookies()
for key, value in parser.iteritems():
cookies[key] = CookieValue(value)
self._cookies = cookies
return self._cookies
@property
def headers(self):
if self._headers is None:
self._headers = RequestHeaders(self._raw_headers)
return self._headers
@property
def body(self):
if self._body is None:
pos = self.raw_input.tell()
self.raw_input.seek(0)
self._body = self.raw_input.read()
self.raw_input.seek(pos)
return self._body
@property
def auth(self):
if self._auth is None:
self._auth = Authentication(self.headers)
return self._auth
class RequestHeaders(dict):
"""Dictionary-like API for accessing request headers."""
def __init__(self, items):
for key, value in items.items():
key = key.lower()
if key in self:
self[key].append(value)
else:
dict.__setitem__(self, key, [value])
def __getitem__(self, key):
"""Get all headers of a certain (case-insensitive) name. If there is
more than one, the values are returned comma separated"""
values = dict.__getitem__(self, key.lower())
if len(values) == 1:
return values[0]
else:
return ", ".join(values)
def __setitem__(self, name, value):
raise Exception("RequestHeaders is read-only")
def get(self, key, default=None):
"""Get a string representing all headers with a particular value,
with multiple headers separated by a comma. If no header is found
return a default value
:param key: The header name to look up (case-insensitive)
:param default: The value to return in the case of no match
"""
try:
return self[key]
except KeyError:
return default
def get_list(self, key, default=missing):
"""Get all the header values for a particular field name as
a list"""
try:
return dict.__getitem__(self, key.lower())
except KeyError:
if default is not missing:
return default
else:
raise
def __contains__(self, key):
return dict.__contains__(self, key.lower())
def iteritems(self):
for item in self:
yield item, self[item]
def itervalues(self):
for item in self:
yield self[item]
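# Illustrative lookups (the server passes a mimetools.Message here; a plain
# dict works for demonstration purposes):
#
#   >>> h = RequestHeaders({"Content-Type": "text/html"})
#   >>> h["content-type"]             # access is case-insensitive
#   'text/html'
#   >>> h.get_list("Content-Type")
#   ['text/html']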
class CookieValue(object):
"""Representation of cookies.
Note that cookies are considered read-only and the string value
of the cookie will not change if you update the field values.
However this is not enforced.
.. attribute:: key
The name of the cookie.
.. attribute:: value
The value of the cookie
.. attribute:: expires
The expiry date of the cookie
.. attribute:: path
The path of the cookie
.. attribute:: comment
The comment of the cookie.
.. attribute:: domain
The domain with which the cookie is associated
.. attribute:: max_age
The max-age value of the cookie.
.. attribute:: secure
Whether the cookie is marked as secure
.. attribute:: httponly
Whether the cookie is marked as httponly
"""
def __init__(self, morsel):
self.key = morsel.key
self.value = morsel.value
for attr in ["expires", "path",
"comment", "domain", "max-age",
"secure", "version", "httponly"]:
setattr(self, attr.replace("-", "_"), morsel[attr])
self._str = morsel.OutputString()
def __str__(self):
return self._str
def __repr__(self):
return self._str
def __eq__(self, other):
"""Equality comparison for cookies. Compares to other cookies
based on value alone and on non-cookies based on the equality
of self.value with the other object so that a cookie with value
"ham" compares equal to the string "ham"
"""
if hasattr(other, "value"):
return self.value == other.value
return self.value == other
class MultiDict(dict):
"""Dictionary type that holds multiple values for each
key"""
#TODO: this should perhaps also order the keys
def __init__(self):
pass
def __setitem__(self, name, value):
dict.__setitem__(self, name, [value])
def add(self, name, value):
if name in self:
dict.__getitem__(self, name).append(value)
else:
dict.__setitem__(self, name, [value])
def __getitem__(self, key):
"""Get the first value with a given key"""
#TODO: should this instead be the last value?
return self.first(key)
def first(self, key, default=missing):
"""Get the first value with a given key
:param key: The key to lookup
:param default: The default to return if key is
not found (throws if nothing is
specified)
"""
if key in self and dict.__getitem__(self, key):
return dict.__getitem__(self, key)[0]
elif default is not missing:
return default
raise KeyError
def last(self, key, default=missing):
"""Get the last value with a given key
:param key: The key to lookup
:param default: The default to return if key is
not found (throws if nothing is
specified)
"""
if key in self and dict.__getitem__(self, key):
return dict.__getitem__(self, key)[-1]
elif default is not missing:
return default
raise KeyError
def get_list(self, key):
"""Get all values with a given key as a list
:param key: The key to lookup
"""
return dict.__getitem__(self, key)
@classmethod
def from_field_storage(cls, fs):
self = cls()
if fs.list is None:
return self
for key in fs:
values = fs[key]
if not isinstance(values, list):
values = [values]
for value in values:
if value.filename:
value = value
else:
value = value.value
self.add(key, value)
return self
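# Quick illustration of the multi-value behaviour:
#
#   >>> d = MultiDict()
#   >>> d.add("a", 1)
#   >>> d.add("a", 2)
#   >>> d["a"], d.last("a"), d.get_list("a")
#   (1, 2, [1, 2])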
class Cookies(MultiDict):
"""MultiDict specialised for Cookie values"""
def __init__(self):
pass
def __getitem__(self, key):
return self.last(key)
class Authentication(object):
"""Object for dealing with HTTP Authentication
.. attribute:: username
The username supplied in the HTTP Authorization
header, or None
.. attribute:: password
The password supplied in the HTTP Authorization
header, or None
"""
def __init__(self, headers):
self.username = None
self.password = None
auth_schemes = {"Basic": self.decode_basic}
if "authorization" in headers:
header = headers.get("authorization")
auth_type, data = header.split(" ", 1)
if auth_type in auth_schemes:
self.username, self.password = auth_schemes[auth_type](data)
else:
raise HTTPException(400, "Unsupported authentication scheme %s" % auth_type)
def decode_basic(self, data):
decoded_data = base64.decodestring(data)
return decoded_data.split(":", 1)
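# Sketch of Basic credential decoding (the encoded pair below is
# illustrative; with no Authorization header both fields stay None):
#
#   >>> auth = Authentication({})
#   >>> auth.username is None
#   True
#   >>> auth.decode_basic(base64.encodestring("user:secret"))
#   ['user', 'secret']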
| mpl-2.0 |
rcrowder/nupic | src/nupic/algorithms/backtracking_tm_shim.py | 6 | 9992 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
A shim for the TM class that transparently implements TemporalMemory,
for use with OPF.
"""
import numpy
from nupic.bindings.algorithms import TemporalMemory as TemporalMemoryCPP
from nupic.algorithms.monitor_mixin.temporal_memory_monitor_mixin import (
TemporalMemoryMonitorMixin)
from nupic.algorithms.temporal_memory import TemporalMemory
class MonitoredTemporalMemory(TemporalMemoryMonitorMixin,
TemporalMemory):
def __init__(self, *args, **kwargs):
TemporalMemoryMonitorMixin.__init__(self, *args, **kwargs)
TemporalMemory.__init__(self, *args, **kwargs)
@classmethod
def read(cls, proto):
"""
Intercepts TemporalMemory deserialization request in order to initialize
`TemporalMemoryMonitorMixin` state
@param proto (DynamicStructBuilder) Proto object
@return (TemporalMemory) TemporalMemory shim instance
"""
tm = super(TemporalMemoryMonitorMixin, cls).read(proto)
# initialize `TemporalMemoryMonitorMixin` attributes
tm.mmName = None
tm._mmTraces = None
tm._mmData = None
tm.mmClearHistory()
tm._mmResetActive = True
return tm
class TMShimMixin(object):
"""
TM => Temporal Memory shim class.
"""
def __init__(self,
numberOfCols=500,
cellsPerColumn=10,
initialPerm=0.11,
connectedPerm=0.50,
minThreshold=8,
newSynapseCount=15,
permanenceInc=0.10,
permanenceDec=0.10,
permanenceMax=1.0,
activationThreshold=12,
predictedSegmentDecrement=0.0,
maxSegmentsPerCell=255,
maxSynapsesPerSegment=255,
globalDecay=0.10,
maxAge=100000,
pamLength=1,
verbosity=0,
outputType="normal",
seed=42):
"""
Translate parameters and initialize member variables specific to `backtracking_tm.py`.
"""
super(TMShimMixin, self).__init__(
columnDimensions=(numberOfCols,),
cellsPerColumn=cellsPerColumn,
activationThreshold=activationThreshold,
initialPermanence=initialPerm,
connectedPermanence=connectedPerm,
minThreshold=minThreshold,
maxNewSynapseCount=newSynapseCount,
permanenceIncrement=permanenceInc,
permanenceDecrement=permanenceDec,
predictedSegmentDecrement=predictedSegmentDecrement,
maxSegmentsPerCell=maxSegmentsPerCell,
maxSynapsesPerSegment=maxSynapsesPerSegment,
seed=seed)
self.infActiveState = {"t": None}
@classmethod
def read(cls, proto):
"""
Intercepts TemporalMemory deserialization request in order to initialize
`self.infActiveState`
@param proto (DynamicStructBuilder) Proto object
@return (TemporalMemory) TemporalMemory shim instance
"""
tm = super(TMShimMixin, cls).read(proto)
tm.infActiveState = {"t": None}
return tm
def compute(self, bottomUpInput, enableLearn, computeInfOutput=None):
"""
(From `backtracking_tm.py`)
Handle one compute, possibly learning.
@param bottomUpInput The bottom-up input, typically from a spatial pooler
@param enableLearn If true, perform learning
@param computeInfOutput If None, default behavior is to disable the inference
output when enableLearn is on.
If true, compute the inference output
If false, do not compute the inference output
"""
super(TMShimMixin, self).compute(set(bottomUpInput.nonzero()[0]),
learn=enableLearn)
numberOfCells = self.numberOfCells()
activeState = numpy.zeros(numberOfCells)
activeState[self.getActiveCells()] = 1
self.infActiveState["t"] = activeState
output = numpy.zeros(numberOfCells)
output[self.getPredictiveCells()] = 1
output[self.getActiveCells()] = 1
return output
def topDownCompute(self, topDownIn=None):
"""
(From `backtracking_tm.py`)
Top-down compute - generate expected input given output of the TM
@param topDownIn top down input from the level above us
@returns best estimate of the TM input that would have generated bottomUpOut.
"""
output = numpy.zeros(self.numberOfColumns())
columns = [self.columnForCell(idx) for idx in self.getPredictiveCells()]
output[columns] = 1
return output
def getActiveState(self):
activeState = numpy.zeros(self.numberOfCells())
activeState[self.getActiveCells()] = 1
return activeState
def getPredictedState(self):
predictedState = numpy.zeros(self.numberOfCells())
predictedState[self.getPredictiveCells()] = 1
return predictedState
def getLearnActiveStateT(self):
state = numpy.zeros([self.numberOfColumns(), self.getCellsPerColumn()])
return state
class TMShim(TMShimMixin, TemporalMemory):
pass
class TMCPPShim(TMShimMixin, TemporalMemoryCPP):
pass
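# Minimal usage sketch (assumes a working nupic install; sizes and indices
# are illustrative):
#
#   import numpy
#   tm = TMShim(numberOfCols=64, cellsPerColumn=4)
#   sdr = numpy.zeros(64)
#   sdr[[3, 17, 42]] = 1                       # active columns from an SP
#   dense = tm.compute(sdr, enableLearn=True)  # 1s at active/predictive cells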
class MonitoredTMShim(MonitoredTemporalMemory):
"""
TM => Monitored Temporal Memory shim class.
TODO: This class is not very DRY. This whole file needs to be replaced by a
pure TemporalMemory region
(WIP at https://github.com/numenta/nupic.research/pull/247).
"""
def __init__(self,
numberOfCols=500,
cellsPerColumn=10,
initialPerm=0.11,
connectedPerm=0.50,
minThreshold=8,
newSynapseCount=15,
permanenceInc=0.10,
permanenceDec=0.10,
permanenceMax=1.0,
activationThreshold=12,
predictedSegmentDecrement=0.0,
maxSegmentsPerCell=255,
maxSynapsesPerSegment=255,
globalDecay=0.10,
maxAge=100000,
pamLength=1,
verbosity=0,
outputType="normal",
seed=42):
"""
Translate parameters and initialize member variables specific to `backtracking_tm.py`.
"""
super(MonitoredTMShim, self).__init__(
columnDimensions=(numberOfCols,),
cellsPerColumn=cellsPerColumn,
activationThreshold=activationThreshold,
initialPermanence=initialPerm,
connectedPermanence=connectedPerm,
minThreshold=minThreshold,
maxNewSynapseCount=newSynapseCount,
permanenceIncrement=permanenceInc,
permanenceDecrement=permanenceDec,
predictedSegmentDecrement=predictedSegmentDecrement,
maxSegmentsPerCell=maxSegmentsPerCell,
maxSynapsesPerSegment=maxSynapsesPerSegment,
seed=seed)
self.infActiveState = {"t": None}
@classmethod
def read(cls, proto):
"""
Intercepts TemporalMemory deserialization request in order to initialize
`self.infActiveState`
@param proto (DynamicStructBuilder) Proto object
@return (TemporalMemory) TemporalMemory shim instance
"""
tm = super(MonitoredTMShim, cls).read(proto)
tm.infActiveState = {"t": None}
return tm
def compute(self, bottomUpInput, enableLearn, computeInfOutput=None):
"""
(From `backtracking_tm.py`)
Handle one compute, possibly learning.
@param bottomUpInput The bottom-up input, typically from a spatial pooler
@param enableLearn If true, perform learning
@param computeInfOutput If None, default behavior is to disable the inference
output when enableLearn is on.
If true, compute the inference output
If false, do not compute the inference output
"""
super(MonitoredTMShim, self).compute(set(bottomUpInput.nonzero()[0]),
learn=enableLearn)
numberOfCells = self.numberOfCells()
activeState = numpy.zeros(numberOfCells)
activeState[self.getActiveCells()] = 1
self.infActiveState["t"] = activeState
output = numpy.zeros(numberOfCells)
output[self.getPredictiveCells() + self.getActiveCells()] = 1
return output
def topDownCompute(self, topDownIn=None):
"""
(From `backtracking_tm.py`)
Top-down compute - generate expected input given output of the TM
@param topDownIn top down input from the level above us
@returns best estimate of the TM input that would have generated bottomUpOut.
"""
output = numpy.zeros(self.numberOfColumns())
columns = [self.columnForCell(idx) for idx in self.getPredictiveCells()]
output[columns] = 1
return output
def getActiveState(self):
activeState = numpy.zeros(self.numberOfCells())
activeState[self.getActiveCells()] = 1
return activeState
def getPredictedState(self):
predictedState = numpy.zeros(self.numberOfCells())
predictedState[self.getPredictiveCells()] = 1
return predictedState
def getLearnActiveStateT(self):
state = numpy.zeros([self.numberOfColumns(), self.cellsPerColumn])
return state
| agpl-3.0 |
felipebetancur/scipy | doc/postprocess.py | 101 | 1442 |
#!/usr/bin/env python
"""
%prog MODE FILES...
Post-processes HTML and Latex files output by Sphinx.
MODE is either 'html' or 'tex'.
"""
import re, optparse
def main():
p = optparse.OptionParser(__doc__)
options, args = p.parse_args()
if len(args) < 1:
p.error('no mode given')
mode = args.pop(0)
if mode not in ('html', 'tex'):
p.error('unknown mode %s' % mode)
for fn in args:
f = open(fn, 'r')
try:
if mode == 'html':
lines = process_html(fn, f.readlines())
elif mode == 'tex':
lines = process_tex(f.readlines())
finally:
f.close()
f = open(fn, 'w')
f.write("".join(lines))
f.close()
def process_html(fn, lines):
return lines
def process_tex(lines):
"""
Remove unnecessary section titles from the LaTeX file.
"""
new_lines = []
for line in lines:
line = re.sub(r'^\s*\\strong{See Also:}\s*$', r'\paragraph{See Also}', line)
if (line.startswith(r'\section{scipy.')
or line.startswith(r'\subsection{scipy.')
or line.startswith(r'\subsubsection{scipy.')
or line.startswith(r'\paragraph{scipy.')
or line.startswith(r'\subparagraph{scipy.')
):
pass # skip!
else:
new_lines.append(line)
return new_lines
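# For instance, a "\section{scipy.*}" line is dropped entirely (illustrative
# call, shown doctest-style):
#
#   >>> process_tex(['\\section{scipy.stats}\n', 'text\n'])
#   ['text\n']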
if __name__ == "__main__":
main()
| bsd-3-clause |
atsao72/sympy | sympy/solvers/diophantine.py | 11 | 79096 |
from __future__ import print_function, division
from sympy import (Poly, igcd, divisors, sign, symbols, S, Integer, Wild, Symbol, factorint,
Add, Mul, solve, ceiling, floor, sqrt, sympify, Subs, ilcm, Matrix, factor_list, perfect_power,
isprime, nextprime, integer_nthroot)
from sympy.core.function import _mexpand
from sympy.simplify.radsimp import rad_rationalize
from sympy.utilities import default_sort_key, numbered_symbols
from sympy.core.numbers import igcdex
from sympy.ntheory.residue_ntheory import sqrt_mod
from sympy.core.compatibility import range
from sympy.core.relational import Eq
from sympy.solvers.solvers import check_assumptions
__all__ = ['diophantine', 'diop_solve', 'classify_diop', 'diop_linear', 'base_solution_linear',
'diop_quadratic', 'diop_DN', 'cornacchia', 'diop_bf_DN', 'transformation_to_DN', 'find_DN',
'diop_ternary_quadratic', 'square_factor', 'descent', 'diop_general_pythagorean',
'diop_general_sum_of_squares', 'partition', 'sum_of_three_squares', 'sum_of_four_squares']
def diophantine(eq, param=symbols("t", integer=True)):
"""
Simplify the solution procedure of diophantine equation ``eq`` by
converting it into a product of terms which should equal zero.
For example, when solving `x^2 - y^2 = 0`, this is treated as
`(x + y)(x - y) = 0` and `x+y = 0` and `x-y = 0` are solved independently
and combined. Each term is solved by calling ``diop_solve()``.
Output of ``diophantine()`` is a set of tuples. Each tuple represents a
solution of the input equation. In a tuple, the solution for each variable
is listed in the alphabetic order of the input variables, i.e. if we have
an equation in two variables `a` and `b`, the first element of the tuple
gives the solution for `a` and the second element gives the solution for
`b`.
Usage
=====
``diophantine(eq, t)``: Solve the diophantine equation ``eq``.
``t`` is the parameter to be used by ``diop_solve()``.
Details
=======
``eq`` should be an expression which is assumed to be zero.
``t`` is the parameter to be used in the solution.
Examples
========
>>> from sympy.solvers.diophantine import diophantine
>>> from sympy.abc import x, y, z
>>> diophantine(x**2 - y**2)
set([(-t, -t), (t, -t)])
#>>> diophantine(x*(2*x + 3*y - z))
#set([(0, n1, n2), (3*t - z, -2*t + z, z)])
#>>> diophantine(x**2 + 3*x*y + 4*x)
#set([(0, n1), (3*t - 4, -t)])
See Also
========
diop_solve()
"""
if isinstance(eq, Eq):
eq = eq.lhs - eq.rhs
eq = Poly(eq).as_expr()
if not eq.is_polynomial() or eq.is_number:
raise TypeError("Equation input format not supported")
var = list(eq.expand(force=True).free_symbols)
var.sort(key=default_sort_key)
terms = factor_list(eq)[1]
sols = set([])
for term in terms:
base = term[0]
var_t, jnk, eq_type = classify_diop(base)
solution = diop_solve(base, param)
if eq_type in ["linear", "homogeneous_ternary_quadratic", "general_pythagorean"]:
if merge_solution(var, var_t, solution) != ():
sols.add(merge_solution(var, var_t, solution))
elif eq_type in ["binary_quadratic", "general_sum_of_squares", "univariate"]:
for sol in solution:
if merge_solution(var, var_t, sol) != ():
sols.add(merge_solution(var, var_t, sol))
return sols
def merge_solution(var, var_t, solution):
"""
This is used to construct the full solution from the solutions of sub
equations.
For example when solving the equation `(x - y)(x^2 + y^2 - z^2) = 0`,
solutions for each of the equations `x-y = 0` and `x^2 + y^2 - z^2` are
found independently. Solutions for `x - y = 0` are `(x, y) = (t, t)`. But
we should introduce a value for z when we output the solution for the
original equation. This function converts `(t, t)` into `(t, t, n_{1})`
where `n_{1}` is an integer parameter.
"""
l = []
if None in solution:
return ()
solution = iter(solution)
params = numbered_symbols("n", integer=True, start=1)
for v in var:
if v in var_t:
l.append(next(solution))
else:
l.append(next(params))
for val, symb in zip(l, var):
if check_assumptions(val, **symb.assumptions0) is False:
return tuple()
return tuple(l)
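# A schematic sketch (symbol names are illustrative, not guaranteed): merging
# the sub-solution (t, t) of x - y = 0 back into the variables [x, y, z]
# appends a fresh integer parameter for z:
#
#   >>> from sympy.abc import x, y, z
#   >>> t = symbols("t", integer=True)
#   >>> merge_solution([x, y, z], [x, y], (t, t))
#   (t, t, n1)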
def diop_solve(eq, param=symbols("t", integer=True)):
"""
Solves the diophantine equation ``eq``.
Similar to ``diophantine()`` but doesn't try to factor ``eq`` as the latter
does. Uses ``classify_diop()`` to determine the type of the equation and
calls the appropriate solver function.
Usage
=====
``diop_solve(eq, t)``: Solve diophantine equation, ``eq`` using ``t``
as a parameter if needed.
Details
=======
``eq`` should be an expression which is assumed to be zero.
``t`` is a parameter to be used in the solution.
Examples
========
>>> from sympy.solvers.diophantine import diop_solve
>>> from sympy.abc import x, y, z, w
>>> diop_solve(2*x + 3*y - 5)
(3*t - 5, -2*t + 5)
>>> diop_solve(4*x + 3*y -4*z + 5)
(3*t + 4*z - 5, -4*t - 4*z + 5, z)
>>> diop_solve(x + 3*y - 4*z + w -6)
(t, -t - 3*y + 4*z + 6, y, z)
>>> diop_solve(x**2 + y**2 - 5)
set([(-2, -1), (-2, 1), (2, -1), (2, 1)])
See Also
========
diophantine()
"""
var, coeff, eq_type = classify_diop(eq)
if eq_type == "linear":
return _diop_linear(var, coeff, param)
elif eq_type == "binary_quadratic":
return _diop_quadratic(var, coeff, param)
elif eq_type == "homogeneous_ternary_quadratic":
x_0, y_0, z_0 = _diop_ternary_quadratic(var, coeff)
return _parametrize_ternary_quadratic((x_0, y_0, z_0), var, coeff)
elif eq_type == "general_pythagorean":
return _diop_general_pythagorean(var, coeff, param)
elif eq_type == "univariate":
l = solve(eq)
s = set([])
for soln in l:
if isinstance(soln, Integer):
s.add((soln,))
return s
elif eq_type == "general_sum_of_squares":
return _diop_general_sum_of_squares(var, coeff)
def classify_diop(eq):
"""
Helper routine used by diop_solve() to find the type of the ``eq`` etc.
Returns a tuple containing the type of the diophantine equation along with
the variables(free symbols) and their coefficients. Variables are returned
as a list and coefficients are returned as a dict with the key being the
respective term and the constant term is keyed to Integer(1). Type is an
element in the set {"linear", "binary_quadratic", "general_pythagorean",
"homogeneous_ternary_quadratic", "univariate", "general_sum_of_squares"}
Usage
=====
``classify_diop(eq)``: Return variables, coefficients and type of the
``eq``.
Details
=======
``eq`` should be an expression which is assumed to be zero.
Examples
========
>>> from sympy.solvers.diophantine import classify_diop
>>> from sympy.abc import x, y, z, w, t
>>> classify_diop(4*x + 6*y - 4)
([x, y], {1: -4, x: 4, y: 6}, 'linear')
>>> classify_diop(x + 3*y -4*z + 5)
([x, y, z], {1: 5, x: 1, y: 3, z: -4}, 'linear')
>>> classify_diop(x**2 + y**2 - x*y + x + 5)
([x, y], {1: 5, x: 1, x**2: 1, y: 0, y**2: 1, x*y: -1}, 'binary_quadratic')
"""
eq = eq.expand(force=True)
var = list(eq.free_symbols)
var.sort(key=default_sort_key)
diop_type = None
coeff = dict([reversed(t.as_independent(*var)) for t in eq.args])
for v in coeff:
if not isinstance(coeff[v], Integer):
raise TypeError("Coefficients should be Integers")
if len(var) == 1:
diop_type = "univariate"
elif Poly(eq).total_degree() == 1:
diop_type = "linear"
elif Poly(eq).total_degree() == 2 and len(var) == 2:
diop_type = "binary_quadratic"
x, y = var[:2]
if isinstance(eq, Mul):
coeff = {x**2: 0, x*y: eq.args[0], y**2: 0, x: 0, y: 0, Integer(1): 0}
else:
for term in [x**2, y**2, x*y, x, y, Integer(1)]:
if term not in coeff.keys():
coeff[term] = Integer(0)
elif Poly(eq).total_degree() == 2 and len(var) == 3 and Integer(1) not in coeff.keys():
for v in var:
if v in coeff.keys():
diop_type = "inhomogeneous_ternary_quadratic"
break
else:
diop_type = "homogeneous_ternary_quadratic"
x, y, z = var[:3]
for term in [x**2, y**2, z**2, x*y, y*z, x*z]:
if term not in coeff.keys():
coeff[term] = Integer(0)
elif Poly(eq).degree() == 2 and len(var) >= 3:
for v in var:
if v in coeff.keys():
diop_type = "inhomogeneous_general_quadratic"
break
else:
if Integer(1) in coeff.keys():
constant_term = True
else:
constant_term = False
non_square_degree_2_terms = False
for v in var:
for u in var:
if u != v and u*v in coeff.keys():
non_square_degree_2_terms = True
break
if non_square_degree_2_terms:
break
if constant_term and non_square_degree_2_terms:
diop_type = "inhomogeneous_general_quadratic"
elif constant_term and not non_square_degree_2_terms:
for v in var:
if coeff[v**2] != 1:
break
else:
diop_type = "general_sum_of_squares"
elif not constant_term and non_square_degree_2_terms:
diop_type = "homogeneous_general_quadratic"
else:
coeff_sign_sum = 0
for v in var:
if not isinstance(sqrt(abs(Integer(coeff[v**2]))), Integer):
break
coeff_sign_sum = coeff_sign_sum + sign(coeff[v**2])
else:
if abs(coeff_sign_sum) == len(var) - 2 and not constant_term:
diop_type = "general_pythagorean"
elif Poly(eq).total_degree() == 3 and len(var) == 2:
x, y = var[:2]
diop_type = "cubic_thue"
for term in [x**3, x**2*y, x*y**2, y**3, Integer(1)]:
if term not in coeff.keys():
coeff[term] = Integer(0)
if diop_type is not None:
return var, coeff, diop_type
else:
raise NotImplementedError("Still not implemented")
def diop_linear(eq, param=symbols("t", integer=True)):
"""
Solves linear diophantine equations.
A linear diophantine equation is an equation of the form `a_{1}x_{1} +
a_{2}x_{2} + .. + a_{n}x_{n} = 0` where `a_{1}, a_{2}, ..a_{n}` are
integer constants and `x_{1}, x_{2}, ..x_{n}` are integer variables.
Usage
=====
``diop_linear(eq)``: Returns a tuple containing solutions to the
diophantine equation ``eq``. Values in the tuple are arranged in the same
order as the sorted variables.
Details
=======
``eq`` is a linear diophantine equation which is assumed to be zero.
``param`` is the parameter to be used in the solution.
Examples
========
>>> from sympy.solvers.diophantine import diop_linear
>>> from sympy.abc import x, y, z, t
>>> from sympy import Integer
>>> diop_linear(2*x - 3*y - 5) #solves equation 2*x - 3*y -5 = 0
(-3*t - 5, -2*t - 5)
Here x = -3*t - 5 and y = -2*t - 5
>>> diop_linear(2*x - 3*y - 4*z -3)
(-3*t - 4*z - 3, -2*t - 4*z - 3, z)
See Also
========
diop_quadratic(), diop_ternary_quadratic(), diop_general_pythagorean(),
diop_general_sum_of_squares()
"""
var, coeff, diop_type = classify_diop(eq)
if diop_type == "linear":
return _diop_linear(var, coeff, param)
def _diop_linear(var, coeff, param):
x, y = var[:2]
a = coeff[x]
b = coeff[y]
if len(var) == len(coeff):
c = 0
else:
c = -coeff[Integer(1)]
if len(var) == 2:
sol_x, sol_y = base_solution_linear(c, a, b, param)
return (sol_x, sol_y)
elif len(var) > 2:
X = []
Y = []
for v in var[2:]:
sol_x, sol_y = base_solution_linear(-coeff[v], a, b)
X.append(sol_x*v)
Y.append(sol_y*v)
sol_x, sol_y = base_solution_linear(c, a, b, param)
X.append(sol_x)
Y.append(sol_y)
l = []
if None not in X and None not in Y:
l.append(Add(*X))
l.append(Add(*Y))
for v in var[2:]:
l.append(v)
else:
for v in var:
l.append(None)
return tuple(l)
def base_solution_linear(c, a, b, t=None):
"""
Return the base solution for a linear diophantine equation with two
variables.
Used by ``diop_linear()`` to find the base solution of a linear
Diophantine equation. If ``t`` is given then the parametrized solution is
returned.
Usage
=====
``base_solution_linear(c, a, b, t)``: ``a``, ``b``, ``c`` are coefficients
in `ax + by = c` and ``t`` is the parameter to be used in the solution.
Examples
========
>>> from sympy.solvers.diophantine import base_solution_linear
>>> from sympy.abc import t
>>> base_solution_linear(5, 2, 3) # equation 2*x + 3*y = 5
(-5, 5)
>>> base_solution_linear(0, 5, 7) # equation 5*x + 7*y = 0
(0, 0)
>>> base_solution_linear(5, 2, 3, t) # equation 2*x + 3*y = 5
(3*t - 5, -2*t + 5)
>>> base_solution_linear(0, 5, 7, t) # equation 5*x + 7*y = 0
(7*t, -5*t)
"""
d = igcd(a, igcd(b, c))
a = a // d
b = b // d
c = c // d
if c == 0:
if t is not None:
return (b*t , -a*t)
else:
return (S.Zero, S.Zero)
else:
x0, y0, d = extended_euclid(int(abs(a)), int(abs(b)))
x0 = x0 * sign(a)
y0 = y0 * sign(b)
if divisible(c, d):
if t is not None:
return (c*x0 + b*t, c*y0 - a*t)
else:
return (Integer(c*x0), Integer(c*y0))
else:
return (None, None)
def extended_euclid(a, b):
"""
For given ``a``, ``b`` returns a tuple containing integers `x`, `y` and `d`
such that `ax + by = d`. Here `d = gcd(a, b)`.
Usage
=====
``extended_euclid(a, b)``: returns `x`, `y` and `\gcd(a, b)`.
Details
=======
``a`` Any instance of Integer.
``b`` Any instance of Integer.
Examples
========
>>> from sympy.solvers.diophantine import extended_euclid
>>> extended_euclid(4, 6)
(-1, 1, 2)
>>> extended_euclid(3, 5)
(2, -1, 1)
"""
if b == 0:
return (1, 0, a)
x0, y0, d = extended_euclid(b, a%b)
x, y = y0, x0 - (a//b) * y0
return x, y, d
def divisible(a, b):
"""
Returns `True` if ``a`` is divisible by ``b`` and `False` otherwise.
"""
return igcd(int(a), int(b)) == abs(int(b))
def diop_quadratic(eq, param=symbols("t", integer=True)):
"""
Solves quadratic diophantine equations.
i.e. equations of the form `Ax^2 + Bxy + Cy^2 + Dx + Ey + F = 0`. Returns a
set containing the tuples `(x, y)` which contain the solutions. If there
are no solutions then `(None, None)` is returned.
Usage
=====
``diop_quadratic(eq, param)``: ``eq`` is a quadratic binary diophantine
equation. ``param`` is used to indicate the parameter to be used in the
solution.
Details
=======
``eq`` should be an expression which is assumed to be zero.
``param`` is a parameter to be used in the solution.
Examples
========
>>> from sympy.abc import x, y, t
>>> from sympy.solvers.diophantine import diop_quadratic
>>> diop_quadratic(x**2 + y**2 + 2*x + 2*y + 2, t)
set([(-1, -1)])
References
==========
.. [1] Methods to solve Ax^2 + Bxy + Cy^2 + Dx + Ey + F = 0,[online],
Available: http://www.alpertron.com.ar/METHODS.HTM
.. [2] Solving the equation ax^2+ bxy + cy^2 + dx + ey + f= 0, [online],
Available: http://www.jpr2718.org/ax2p.pdf
See Also
========
diop_linear(), diop_ternary_quadratic(), diop_general_sum_of_squares(),
diop_general_pythagorean()
"""
var, coeff, diop_type = classify_diop(eq)
if diop_type == "binary_quadratic":
return _diop_quadratic(var, coeff, param)
def _diop_quadratic(var, coeff, t):
x, y = var[:2]
for term in [x**2, y**2, x*y, x, y, Integer(1)]:
if term not in coeff.keys():
coeff[term] = Integer(0)
A = coeff[x**2]
B = coeff[x*y]
C = coeff[y**2]
D = coeff[x]
E = coeff[y]
F = coeff[Integer(1)]
d = igcd(A, igcd(B, igcd(C, igcd(D, igcd(E, F)))))
A = A // d
B = B // d
C = C // d
D = D // d
E = E // d
F = F // d
# (1) Linear case: A = B = C = 0 ==> considered under linear diophantine equations
# (2) Simple-Hyperbolic case:A = C = 0, B != 0
# In this case equation can be converted to (Bx + E)(By + D) = DE - BF
# We consider two cases; DE - BF = 0 and DE - BF != 0
# More details, http://www.alpertron.com.ar/METHODS.HTM#SHyperb
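# For example, 2*x*y + 5*x - 3*y - 5 = 0 gives (2*x - 3)*(2*y + 5) = -5, so
# candidate x values come from divisors d of D*E - B*F via x = (d - E)/B;
# here D*E - B*F = -5 and d = 1 yields the solution (x, y) = (2, -5).
# (Illustrative arithmetic only.)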
l = set([])
if A == 0 and C == 0 and B != 0:
if D*E - B*F == 0:
if divisible(int(E), int(B)):
l.add((-E/B, t))
if divisible(int(D), int(B)):
l.add((t, -D/B))
else:
div = divisors(D*E - B*F)
div = div + [-term for term in div]
for d in div:
if divisible(int(d - E), int(B)):
x0 = (d - E) // B
if divisible(int(D*E - B*F), int(d)):
if divisible(int((D*E - B*F)// d - D), int(B)):
y0 = ((D*E - B*F) // d - D) // B
l.add((x0, y0))
# (3) Parabolic case: B**2 - 4*A*C = 0
# There are two subcases to be considered in this case.
# sqrt(c)D - sqrt(a)E = 0 and sqrt(c)D - sqrt(a)E != 0
# More Details, http://www.alpertron.com.ar/METHODS.HTM#Parabol
elif B**2 - 4*A*C == 0:
if A == 0:
s = _diop_quadratic([y, x], coeff, t)
for soln in s:
l.add((soln[1], soln[0]))
else:
g = igcd(A, C)
g = abs(g) * sign(A)
a = A // g
b = B // g
c = C // g
e = sign(B/A)
if e*sqrt(c)*D - sqrt(a)*E == 0:
z = symbols("z", real=True)
roots = solve(sqrt(a)*g*z**2 + D*z + sqrt(a)*F)
for root in roots:
if isinstance(root, Integer):
l.add((diop_solve(sqrt(a)*x + e*sqrt(c)*y - root)[0], diop_solve(sqrt(a)*x + e*sqrt(c)*y - root)[1]))
elif isinstance(e*sqrt(c)*D - sqrt(a)*E, Integer):
solve_x = lambda u: e*sqrt(c)*g*(sqrt(a)*E - e*sqrt(c)*D)*t**2 - (E + 2*e*sqrt(c)*g*u)*t\
- (e*sqrt(c)*g*u**2 + E*u + e*sqrt(c)*F) // (e*sqrt(c)*D - sqrt(a)*E)
solve_y = lambda u: sqrt(a)*g*(e*sqrt(c)*D - sqrt(a)*E)*t**2 + (D + 2*sqrt(a)*g*u)*t \
+ (sqrt(a)*g*u**2 + D*u + sqrt(a)*F) // (e*sqrt(c)*D - sqrt(a)*E)
for z0 in range(0, abs(e*sqrt(c)*D - sqrt(a)*E)):
if divisible(sqrt(a)*g*z0**2 + D*z0 + sqrt(a)*F, e*sqrt(c)*D - sqrt(a)*E):
l.add((solve_x(z0), solve_y(z0)))
# (4) Method used when B**2 - 4*A*C is a square, described on p. 6 of the
# paper below by John P. Robertson.
# http://www.jpr2718.org/ax2p.pdf
elif isinstance(sqrt(B**2 - 4*A*C), Integer):
if A != 0:
r = sqrt(B**2 - 4*A*C)
u, v = symbols("u, v", integer=True)
eq = _mexpand(4*A*r*u*v + 4*A*D*(B*v + r*u + r*v - B*u) + 2*A*4*A*E*(u - v) + 4*A*r*4*A*F)
sol = diop_solve(eq, t)
sol = list(sol)
for solution in sol:
s0 = solution[0]
t0 = solution[1]
x_0 = S(B*t0 + r*s0 + r*t0 - B*s0)/(4*A*r)
y_0 = S(s0 - t0)/(2*r)
if isinstance(s0, Symbol) or isinstance(t0, Symbol):
if check_param(x_0, y_0, 4*A*r, t) != (None, None):
l.add((check_param(x_0, y_0, 4*A*r, t)[0], check_param(x_0, y_0, 4*A*r, t)[1]))
elif divisible(B*t0 + r*s0 + r*t0 - B*s0, 4*A*r):
if divisible(s0 - t0, 2*r):
if is_solution_quad(var, coeff, x_0, y_0):
l.add((x_0, y_0))
else:
_var = var
_var[0], _var[1] = _var[1], _var[0] # Interchange x and y
s = _diop_quadratic(_var, coeff, t)
while len(s) > 0:
sol = s.pop()
l.add((sol[1], sol[0]))
# (5) B**2 - 4*A*C > 0 and B**2 - 4*A*C not a square or B**2 - 4*A*C < 0
else:
P, Q = _transformation_to_DN(var, coeff)
D, N = _find_DN(var, coeff)
solns_pell = diop_DN(D, N)
if D < 0:
for solution in solns_pell:
for X_i in [-solution[0], solution[0]]:
for Y_i in [-solution[1], solution[1]]:
x_i, y_i = (P*Matrix([X_i, Y_i]) + Q)[0], (P*Matrix([X_i, Y_i]) + Q)[1]
if isinstance(x_i, Integer) and isinstance(y_i, Integer):
l.add((x_i, y_i))
else:
# In this case equation can be transformed into a Pell equation
#n = symbols("n", integer=True)
fund_solns = solns_pell
solns_pell = set(fund_solns)
for X, Y in fund_solns:
solns_pell.add((-X, -Y))
a = diop_DN(D, 1)
T = a[0][0]
U = a[0][1]
if (isinstance(P[0], Integer) and isinstance(P[1], Integer) and isinstance(P[2], Integer)
and isinstance(P[3], Integer) and isinstance(Q[0], Integer) and isinstance(Q[1], Integer)):
for sol in solns_pell:
r = sol[0]
s = sol[1]
x_n = S((r + s*sqrt(D))*(T + U*sqrt(D))**t + (r - s*sqrt(D))*(T - U*sqrt(D))**t)/2
y_n = S((r + s*sqrt(D))*(T + U*sqrt(D))**t - (r - s*sqrt(D))*(T - U*sqrt(D))**t)/(2*sqrt(D))
x_n = _mexpand(x_n)
y_n = _mexpand(y_n)
x_n, y_n = (P*Matrix([x_n, y_n]) + Q)[0], (P*Matrix([x_n, y_n]) + Q)[1]
l.add((x_n, y_n))
else:
L = ilcm(S(P[0]).q, ilcm(S(P[1]).q, ilcm(S(P[2]).q,
ilcm(S(P[3]).q, ilcm(S(Q[0]).q, S(Q[1]).q)))))
k = 1
T_k = T
U_k = U
while (T_k - 1) % L != 0 or U_k % L != 0:
T_k, U_k = T_k*T + D*U_k*U, T_k*U + U_k*T
k += 1
for X, Y in solns_pell:
for i in range(k):
Z = P*Matrix([X, Y]) + Q
x, y = Z[0], Z[1]
if isinstance(x, Integer) and isinstance(y, Integer):
Xt = S((X + sqrt(D)*Y)*(T_k + sqrt(D)*U_k)**t +
(X - sqrt(D)*Y)*(T_k - sqrt(D)*U_k)**t)/ 2
Yt = S((X + sqrt(D)*Y)*(T_k + sqrt(D)*U_k)**t -
(X - sqrt(D)*Y)*(T_k - sqrt(D)*U_k)**t)/ (2*sqrt(D))
Zt = P*Matrix([Xt, Yt]) + Q
l.add((Zt[0], Zt[1]))
X, Y = X*T + D*U*Y, X*U + Y*T
return l
def is_solution_quad(var, coeff, u, v):
"""
Check whether `(u, v)` is solution to the quadratic binary diophantine
equation with the variable list ``var`` and coefficient dictionary
``coeff``.
Not intended for use by normal users.
"""
x, y = var[:2]
eq = x**2*coeff[x**2] + x*y*coeff[x*y] + y**2*coeff[y**2] + x*coeff[x] + y*coeff[y] + coeff[Integer(1)]
return _mexpand(Subs(eq, (x, y), (u, v)).doit()) == 0
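# For example, with var = [x, y] and coeff as produced by classify_diop for
# x**2 + y**2 - 5, is_solution_quad(var, coeff, 2, 1) is True while the pair
# (1, 1) gives False.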
def diop_DN(D, N, t=symbols("t", integer=True)):
"""
Solves the equation `x^2 - Dy^2 = N`.
Mainly concerned with the case `D > 0` where `D` is not a perfect square,
which is the same as the generalized Pell equation. To solve the
generalized Pell equation this function uses the LMM algorithm. Refer [1]_
for more details on the algorithm.
Returns one solution for each class of the solutions. Other solutions of
the class can be constructed according to the values of ``D`` and ``N``.
Returns a list containing the solution tuples `(x, y)`.
Usage
=====
``diop_DN(D, N, t)``: D and N are integers as in `x^2 - Dy^2 = N` and
``t`` is the parameter to be used in the solutions.
Details
=======
``D`` and ``N`` correspond to D and N in the equation.
``t`` is the parameter to be used in the solutions.
Examples
========
>>> from sympy.solvers.diophantine import diop_DN
>>> diop_DN(13, -4) # Solves equation x**2 - 13*y**2 = -4
[(3, 1), (393, 109), (36, 10)]
The output can be interpreted as follows: There are three fundamental
solutions to the equation `x^2 - 13y^2 = -4` given by (3, 1), (393, 109)
and (36, 10). Each tuple is in the form (x, y), i.e. solution (3, 1) means
that `x = 3` and `y = 1`.
>>> diop_DN(986, 1) # Solves equation x**2 - 986*y**2 = 1
[(49299, 1570)]
See Also
========
find_DN(), diop_bf_DN()
References
==========
.. [1] Solving the generalized Pell equation x**2 - D*y**2 = N, John P.
Robertson, July 31, 2004, Pages 16 - 17. [online], Available:
http://www.jpr2718.org/pell.pdf
"""
if D < 0:
if N == 0:
return [(S.Zero, S.Zero)]
elif N < 0:
return []
elif N > 0:
d = divisors(square_factor(N))
sol = []
for divisor in d:
sols = cornacchia(1, -D, N // divisor**2)
if sols:
for x, y in sols:
sol.append((divisor*x, divisor*y))
return sol
elif D == 0:
if N < 0 or not isinstance(sqrt(N), Integer):
return []
if N == 0:
return [(S.Zero, t)]
if isinstance(sqrt(N), Integer):
return [(sqrt(N), t)]
else: # D > 0
if isinstance(sqrt(D), Integer):
r = sqrt(D)
if N == 0:
return [(r*t, t)]
else:
sol = []
for y in range(floor(sign(N)*(N - 1)/(2*r)) + 1):
if isinstance(sqrt(D*y**2 + N), Integer):
sol.append((sqrt(D*y**2 + N), y))
return sol
else:
if N == 0:
return [(S.Zero, S.Zero)]
elif abs(N) == 1:
pqa = PQa(0, 1, D)
a_0 = floor(sqrt(D))
l = 0
G = []
B = []
for i in pqa:
a = i[2]
G.append(i[5])
B.append(i[4])
if l != 0 and a == 2*a_0:
break
l = l + 1
if l % 2 == 1:
if N == -1:
x = G[l-1]
y = B[l-1]
else:
count = l
while count < 2*l - 1:
i = next(pqa)
G.append(i[5])
B.append(i[4])
count = count + 1
x = G[count]
y = B[count]
else:
if N == 1:
x = G[l-1]
y = B[l-1]
else:
return []
return [(x, y)]
else:
fs = []
sol = []
div = divisors(N)
for d in div:
if divisible(N, d**2):
fs.append(d)
for f in fs:
m = N // f**2
zs = sqrt_mod(D, abs(m), True)
zs = [i for i in zs if i <= abs(m) // 2 ]
if abs(m) != 2:
zs = zs + [-i for i in zs]
if S.Zero in zs:
zs.remove(S.Zero) # Remove duplicate zero
for z in zs:
pqa = PQa(z, abs(m), D)
l = 0
G = []
B = []
for i in pqa:
a = i[2]
G.append(i[5])
B.append(i[4])
if l != 0 and abs(i[1]) == 1:
r = G[l-1]
s = B[l-1]
if r**2 - D*s**2 == m:
sol.append((f*r, f*s))
elif diop_DN(D, -1) != []:
a = diop_DN(D, -1)
sol.append((f*(r*a[0][0] + a[0][1]*s*D), f*(r*a[0][1] + s*a[0][0])))
break
l = l + 1
if l == length(z, abs(m), D):
break
return sol
def cornacchia(a, b, m):
"""
Solves `ax^2 + by^2 = m` where `\gcd(a, b) = 1 = \gcd(a, m)` and `a, b > 0`.
Uses the algorithm due to Cornacchia. The method only finds primitive
solutions, i.e. ones with `\gcd(x, y) = 1`. So this method can't be used to
find the solutions of `x^2 + y^2 = 20` since the only solution to the former
is `(x, y) = (4, 2)` and it is not primitive. When `a = b = 1`, only the
solutions with `x \geq y` are found. For more details, see the References.
Examples
========
>>> from sympy.solvers.diophantine import cornacchia
>>> cornacchia(2, 3, 35) # equation 2x**2 + 3y**2 = 35
set([(2, 3), (4, 1)])
>>> cornacchia(1, 1, 25) # equation x**2 + y**2 = 25
set([(4, 3)])
References
===========
.. [1] A. Nitaj, "L'algorithme de Cornacchia"
.. [2] Solving the diophantine equation ax**2 + by**2 = m by Cornacchia's
method, [online], Available:
http://www.numbertheory.org/php/cornacchia.html
"""
sols = set([])
a1 = igcdex(a, m)[0]
v = sqrt_mod(-b*a1, m, True)
if v is None:
return None
if not isinstance(v, list):
v = [v]
for t in v:
if t < m // 2:
continue
u, r = t, m
while True:
u, r = r, u % r
if a*r**2 < m:
break
m1 = m - a*r**2
if m1 % b == 0:
m1 = m1 // b
if isinstance(sqrt(m1), Integer):
s = sqrt(m1)
sols.add((int(r), int(s)))
return sols
def PQa(P_0, Q_0, D):
"""
Returns useful information needed to solve the Pell equation.
There are six sequences of integers defined related to the continued
fraction representation of `\\frac{P + \sqrt{D}}{Q}`, namely {`P_{i}`},
{`Q_{i}`}, {`a_{i}`}, {`A_{i}`}, {`B_{i}`}, {`G_{i}`}. ``PQa()`` returns
these values as a 6-tuple in the same order as mentioned above. Refer [1]_
for more detailed information.
Usage
=====
``PQa(P_0, Q_0, D)``: ``P_0``, ``Q_0`` and ``D`` are integers corresponding
to `P_{0}`, `Q_{0}` and `D` in the continued fraction
`\\frac{P_{0} + \sqrt{D}}{Q_{0}}`.
Also it's assumed that `P_{0}^2 \equiv D \pmod{|Q_{0}|}` and `D` is square free.
Examples
========
>>> from sympy.solvers.diophantine import PQa
>>> pqa = PQa(13, 4, 5) # (13 + sqrt(5))/4
>>> next(pqa) # (P_0, Q_0, a_0, A_0, B_0, G_0)
(13, 4, 3, 3, 1, -1)
>>> next(pqa) # (P_1, Q_1, a_1, A_1, B_1, G_1)
(-1, 1, 1, 4, 1, 3)
References
==========
.. [1] Solving the generalized Pell equation x^2 - Dy^2 = N, John P.
Robertson, July 31, 2004, Pages 4 - 8. http://www.jpr2718.org/pell.pdf
"""
A_i_2 = B_i_1 = 0
A_i_1 = B_i_2 = 1
G_i_2 = -P_0
G_i_1 = Q_0
P_i = P_0
Q_i = Q_0
while True:
a_i = floor((P_i + sqrt(D))/Q_i)
A_i = a_i*A_i_1 + A_i_2
B_i = a_i*B_i_1 + B_i_2
G_i = a_i*G_i_1 + G_i_2
yield P_i, Q_i, a_i, A_i, B_i, G_i
A_i_1, A_i_2 = A_i, A_i_1
B_i_1, B_i_2 = B_i, B_i_1
G_i_1, G_i_2 = G_i, G_i_1
P_i = a_i*Q_i - P_i
Q_i = (D - P_i**2)/Q_i
def diop_bf_DN(D, N, t=symbols("t", integer=True)):
"""
Uses brute force to solve the equation, `x^2 - Dy^2 = N`.
Mainly concerned with the generalized Pell equation which is the case when
`D > 0, D` is not a perfect square. For more information on the case refer
[1]_. Let `(t, u)` be the minimal positive solution of the equation
`x^2 - Dy^2 = 1`. Then this method requires
`\sqrt{\\frac{\mid N \mid (t \pm 1)}{2D}}` to be small.
Usage
=====
``diop_bf_DN(D, N, t)``: ``D`` and ``N`` are coefficients in
`x^2 - Dy^2 = N` and ``t`` is the parameter to be used in the solutions.
Details
=======
``D`` and ``N`` correspond to D and N in the equation.
``t`` is the parameter to be used in the solutions.
Examples
========
>>> from sympy.solvers.diophantine import diop_bf_DN
>>> diop_bf_DN(13, -4)
[(3, 1), (-3, 1), (36, 10)]
>>> diop_bf_DN(986, 1)
[(49299, 1570)]
See Also
========
diop_DN()
References
==========
.. [1] Solving the generalized Pell equation x**2 - D*y**2 = N, John P.
Robertson, July 31, 2004, Page 15. http://www.jpr2718.org/pell.pdf
"""
sol = []
a = diop_DN(D, 1)
u = a[0][0]
v = a[0][1]
if abs(N) == 1:
return diop_DN(D, N)
elif N > 1:
L1 = 0
L2 = floor(sqrt(S(N*(u - 1))/(2*D))) + 1
elif N < -1:
L1 = ceiling(sqrt(S(-N)/D))
L2 = floor(sqrt(S(-N*(u + 1))/(2*D))) + 1
else:
if D < 0:
return [(S.Zero, S.Zero)]
elif D == 0:
return [(S.Zero, t)]
else:
if isinstance(sqrt(D), Integer):
return [(sqrt(D)*t, t), (-sqrt(D)*t, t)]
else:
return [(S.Zero, S.Zero)]
for y in range(L1, L2):
if isinstance(sqrt(N + D*y**2), Integer):
x = sqrt(N + D*y**2)
sol.append((x, y))
if not equivalent(x, y, -x, y, D, N):
sol.append((-x, y))
return sol
def equivalent(u, v, r, s, D, N):
"""
Returns True if two solutions `(u, v)` and `(r, s)` of `x^2 - Dy^2 = N`
belong to the same equivalence class and False otherwise.
Two solutions `(u, v)` and `(r, s)` to the above equation fall into the
same equivalence class iff both `(ur - Dvs)` and `(us - vr)` are divisible
by `N`. See reference [1]_. No test is performed to check whether `(u, v)`
and `(r, s)` are actually solutions to the equation; the user should take
care of this.
Usage
=====
``equivalent(u, v, r, s, D, N)``: `(u, v)` and `(r, s)` are two solutions
of the equation `x^2 - Dy^2 = N` and all parameters involved are integers.
Examples
========
>>> from sympy.solvers.diophantine import equivalent
>>> equivalent(18, 5, -18, -5, 13, -1)
True
>>> equivalent(3, 1, -18, 393, 109, -4)
False
References
==========
.. [1] Solving the generalized Pell equation x**2 - D*y**2 = N, John P.
Robertson, July 31, 2004, Page 12. http://www.jpr2718.org/pell.pdf
"""
return divisible(u*r - D*v*s, N) and divisible(u*s - v*r, N)
def length(P, Q, D):
"""
Returns the sum of the lengths of the aperiodic and periodic parts of the
continued fraction representation of `\\frac{P + \sqrt{D}}{Q}`.
It is important to remember that this does NOT return the length of the
periodic part alone but the sum of the lengths of the two parts as
mentioned above.
Usage
=====
``length(P, Q, D)``: ``P``, ``Q`` and ``D`` are integers corresponding to
the continued fraction `\\frac{P + \sqrt{D}}{Q}`.
Details
=======
``P``, ``D`` and ``Q`` corresponds to P, D and Q in the continued fraction,
`\\frac{P + \sqrt{D}}{Q}`.
Examples
========
>>> from sympy.solvers.diophantine import length
>>> length(-2 , 4, 5) # (-2 + sqrt(5))/4
3
>>> length(-5, 4, 17) # (-5 + sqrt(17))/4
4
"""
x = P + sqrt(D)
y = Q
x = sympify(x)
v, res = [], []
q = x/y
if q < 0:
v.append(q)
res.append(floor(q))
q = q - floor(q)
num, den = rad_rationalize(1, q)
q = num / den
while 1:
v.append(q)
a = int(q)
res.append(a)
if q == a:
return len(res)
num, den = rad_rationalize(1,(q - a))
q = num / den
if q in v:
return len(res)
def transformation_to_DN(eq):
"""
This function transforms the general quadratic,
`ax^2 + bxy + cy^2 + dx + ey + f = 0`,
to the easier-to-handle form `X^2 - DY^2 = N`.
This is used to solve the general quadratic equation by transforming it to
the latter form. Refer [1]_ for more detailed information on the
transformation. This function returns a tuple (A, B) where A is a 2 X 2
matrix and B is a 2 X 1 matrix such that,
Transpose([x y]) = A * Transpose([X Y]) + B
Usage
=====
``transformation_to_DN(eq)``: where ``eq`` is the quadratic to be
transformed.
Examples
========
>>> from sympy.abc import x, y
>>> from sympy.solvers.diophantine import transformation_to_DN
>>> from sympy.solvers.diophantine import classify_diop
>>> A, B = transformation_to_DN(x**2 - 3*x*y - y**2 - 2*y + 1)
>>> A
Matrix([
[1/26, 3/26],
[ 0, 1/13]])
>>> B
Matrix([
[-6/13],
[-4/13]])
A, B returned are such that Transpose((x y)) = A * Transpose((X Y)) + B.
Substituting these values for `x` and `y` and a bit of simplifying work
will give an equation of the form `x^2 - Dy^2 = N`.
>>> from sympy.abc import X, Y
>>> from sympy import Matrix, simplify, Subs
>>> u = (A*Matrix([X, Y]) + B)[0] # Transformation for x
>>> u
X/26 + 3*Y/26 - 6/13
>>> v = (A*Matrix([X, Y]) + B)[1] # Transformation for y
>>> v
Y/13 - 4/13
Next we will substitute these formulas for `x` and `y` and do
``simplify()``.
>>> eq = simplify(Subs(x**2 - 3*x*y - y**2 - 2*y + 1, (x, y), (u, v)).doit())
>>> eq
X**2/676 - Y**2/52 + 17/13
By multiplying the denominator appropriately, we can get a Pell equation
in the standard form.
>>> eq * 676
X**2 - 13*Y**2 + 884
If only the final equation is needed, ``find_DN()`` can be used.
See Also
========
find_DN()
References
==========
.. [1] Solving the equation ax^2 + bxy + cy^2 + dx + ey + f = 0,
John P.Robertson, May 8, 2003, Page 7 - 11.
http://www.jpr2718.org/ax2p.pdf
"""
var, coeff, diop_type = classify_diop(eq)
if diop_type == "binary_quadratic":
return _transformation_to_DN(var, coeff)
def _transformation_to_DN(var, coeff):
x, y = var[:2]
a = coeff[x**2]
b = coeff[x*y]
c = coeff[y**2]
d = coeff[x]
e = coeff[y]
f = coeff[Integer(1)]
g = igcd(a, igcd(b, igcd(c, igcd(d, igcd(e, f)))))
a = a // g
b = b // g
c = c // g
d = d // g
e = e // g
f = f // g
X, Y = symbols("X, Y", integer=True)
if b != Integer(0):
B = (S(2*a)/b).p
C = (S(2*a)/b).q
A = (S(a)/B**2).p
T = (S(a)/B**2).q
# eq_1 = A*B*X**2 + B*(c*T - A*C**2)*Y**2 + d*T*X + (B*e*T - d*T*C)*Y + f*T*B
coeff = {X**2: A*B, X*Y: 0, Y**2: B*(c*T - A*C**2), X: d*T, Y: B*e*T - d*T*C, Integer(1): f*T*B}
A_0, B_0 = _transformation_to_DN([X, Y], coeff)
return Matrix(2, 2, [S(1)/B, -S(C)/B, 0, 1])*A_0, Matrix(2, 2, [S(1)/B, -S(C)/B, 0, 1])*B_0
else:
if d != Integer(0):
B = (S(2*a)/d).p
C = (S(2*a)/d).q
A = (S(a)/B**2).p
T = (S(a)/B**2).q
# eq_2 = A*X**2 + c*T*Y**2 + e*T*Y + f*T - A*C**2
coeff = {X**2: A, X*Y: 0, Y**2: c*T, X: 0, Y: e*T, Integer(1): f*T - A*C**2}
A_0, B_0 = _transformation_to_DN([X, Y], coeff)
return Matrix(2, 2, [S(1)/B, 0, 0, 1])*A_0, Matrix(2, 2, [S(1)/B, 0, 0, 1])*B_0 + Matrix([-S(C)/B, 0])
else:
if e != Integer(0):
B = (S(2*c)/e).p
C = (S(2*c)/e).q
A = (S(c)/B**2).p
T = (S(c)/B**2).q
# eq_3 = a*T*X**2 + A*Y**2 + f*T - A*C**2
coeff = {X**2: a*T, X*Y: 0, Y**2: A, X: 0, Y: 0, Integer(1): f*T - A*C**2}
A_0, B_0 = _transformation_to_DN([X, Y], coeff)
return Matrix(2, 2, [1, 0, 0, S(1)/B])*A_0, Matrix(2, 2, [1, 0, 0, S(1)/B])*B_0 + Matrix([0, -S(C)/B])
else:
# TODO: pre-simplification: Not necessary but may simplify
# the equation.
return Matrix(2, 2, [S(1)/a, 0, 0, 1]), Matrix([0, 0])
def find_DN(eq):
"""
This function returns a tuple, `(D, N)` of the simplified form,
`x^2 - Dy^2 = N`, corresponding to the general quadratic,
`ax^2 + bxy + cy^2 + dx + ey + f = 0`.
Solving the general quadratic is then equivalent to solving the equation
`X^2 - DY^2 = N` and transforming the solutions by using the transformation
matrices returned by ``transformation_to_DN()``.
Usage
=====
``find_DN(eq)``: where ``eq`` is the quadratic to be transformed.
Examples
========
>>> from sympy.abc import x, y
>>> from sympy.solvers.diophantine import find_DN
>>> find_DN(x**2 - 3*x*y - y**2 - 2*y + 1)
(13, -884)
Interpretation of the output is that we get `X^2 -13Y^2 = -884` after
transforming `x^2 - 3xy - y^2 - 2y + 1` using the transformation returned
by ``transformation_to_DN()``.
See Also
========
transformation_to_DN()
References
==========
.. [1] Solving the equation ax^2 + bxy + cy^2 + dx + ey + f = 0,
John P.Robertson, May 8, 2003, Page 7 - 11.
http://www.jpr2718.org/ax2p.pdf
"""
var, coeff, diop_type = classify_diop(eq)
if diop_type == "binary_quadratic":
return _find_DN(var, coeff)
def _find_DN(var, coeff):
x, y = var[:2]
X, Y = symbols("X, Y", integer=True)
A , B = _transformation_to_DN(var, coeff)
u = (A*Matrix([X, Y]) + B)[0]
v = (A*Matrix([X, Y]) + B)[1]
eq = x**2*coeff[x**2] + x*y*coeff[x*y] + y**2*coeff[y**2] + x*coeff[x] + y*coeff[y] + coeff[Integer(1)]
simplified = _mexpand(Subs(eq, (x, y), (u, v)).doit())
coeff = dict([reversed(t.as_independent(*[X, Y])) for t in simplified.args])
for term in [X**2, Y**2, Integer(1)]:
if term not in coeff.keys():
coeff[term] = Integer(0)
return -coeff[Y**2]/coeff[X**2], -coeff[Integer(1)]/coeff[X**2]
def check_param(x, y, a, t):
"""
Check if there is a number modulo ``a`` such that ``x`` and ``y`` are both
integers. If such a number exists, find a parametric representation for
``x`` and ``y``.
Here ``x`` and ``y`` are functions of ``t``.
"""
k, m, n = symbols("k, m, n", integer=True)
p = Wild("p", exclude=[k])
q = Wild("q", exclude=[k])
ok = False
for i in range(a):
z_x = _mexpand(Subs(x, t, a*k + i).doit()).match(p*k + q)
z_y = _mexpand(Subs(y, t, a*k + i).doit()).match(p*k + q)
if (isinstance(z_x[p], Integer) and isinstance(z_x[q], Integer) and
isinstance(z_y[p], Integer) and isinstance(z_y[q], Integer)):
ok = True
break
if ok:
x_param = x.match(p*t + q)
y_param = y.match(p*t + q)
if x_param[p] == 0 or y_param[p] == 0:
if x_param[p] == 0:
l1, junk = Poly(y).clear_denoms()
else:
l1 = 1
if y_param[p] == 0:
l2, junk = Poly(x).clear_denoms()
else:
l2 = 1
return x*ilcm(l1, l2), y*ilcm(l1, l2)
eq = S(m - x_param[q])/x_param[p] - S(n - y_param[q])/y_param[p]
lcm_denom, junk = Poly(eq).clear_denoms()
eq = eq * lcm_denom
return diop_solve(eq, t)[0], diop_solve(eq, t)[1]
else:
return (None, None)
def diop_ternary_quadratic(eq):
"""
Solves the general quadratic ternary form,
`ax^2 + by^2 + cz^2 + fxy + gyz + hxz = 0`.
Returns a tuple `(x, y, z)` which is a base solution for the above
equation. If there are no solutions, `(None, None, None)` is returned.
Usage
=====
``diop_ternary_quadratic(eq)``: Return a tuple containing a basic solution
to ``eq``.
Details
=======
``eq`` should be a homogeneous expression of degree two in three variables
and it is assumed to be zero.
Examples
========
>>> from sympy.abc import x, y, z
>>> from sympy.solvers.diophantine import diop_ternary_quadratic
>>> diop_ternary_quadratic(x**2 + 3*y**2 - z**2)
(1, 0, 1)
>>> diop_ternary_quadratic(4*x**2 + 5*y**2 - z**2)
(1, 0, 2)
>>> diop_ternary_quadratic(45*x**2 - 7*y**2 - 8*x*y - z**2)
(28, 45, 105)
>>> diop_ternary_quadratic(x**2 - 49*y**2 - z**2 + 13*z*y -8*x*y)
(9, 1, 5)
"""
var, coeff, diop_type = classify_diop(eq)
if diop_type == "homogeneous_ternary_quadratic":
return _diop_ternary_quadratic(var, coeff)
def _diop_ternary_quadratic(_var, coeff):
x, y, z = _var[:3]
var = [x]*3
var[0], var[1], var[2] = _var[0], _var[1], _var[2]
# Equations of the form B*x*y + C*z*x + E*y*z = 0 and at least two of the
# coefficients B, C, E are non-zero.
# There are infinitely many solutions for the equation.
# Ex: (0, 0, t), (0, t, 0), (t, 0, 0)
# The equation can be re-written as y*(B*x + E*z) = -C*x*z and we can find
# rather unobvious solutions. Set y = -C and B*x + E*z = x*z. The latter can
# be solved by using methods for binary quadratic diophantine equations.
# Select the solution which minimizes |x| + |z|.
if coeff[x**2] == 0 and coeff[y**2] == 0 and coeff[z**2] == 0:
if coeff[x*z] != 0:
sols = diophantine(coeff[x*y]*x + coeff[y*z]*z - x*z)
s = sols.pop()
min_sum = abs(s[0]) + abs(s[1])
for r in sols:
if abs(r[0]) + abs(r[1]) < min_sum:
s = r
min_sum = abs(s[0]) + abs(s[1])
x_0, y_0, z_0 = s[0], -coeff[x*z], s[1]
else:
var[0], var[1] = _var[1], _var[0]
y_0, x_0, z_0 = _diop_ternary_quadratic(var, coeff)
return simplified(x_0, y_0, z_0)
if coeff[x**2] == 0:
# If the coefficient of x**2 is zero, change the variables
if coeff[y**2] == 0:
var[0], var[2] = _var[2], _var[0]
z_0, y_0, x_0 = _diop_ternary_quadratic(var, coeff)
else:
var[0], var[1] = _var[1], _var[0]
y_0, x_0, z_0 = _diop_ternary_quadratic(var, coeff)
else:
if coeff[x*y] != 0 or coeff[x*z] != 0:
# Apply the transformation x --> X - (B*y + C*z)/(2*A)
A = coeff[x**2]
B = coeff[x*y]
C = coeff[x*z]
D = coeff[y**2]
E = coeff[y*z]
F = coeff[z**2]
_coeff = dict()
_coeff[x**2] = 4*A**2
_coeff[y**2] = 4*A*D - B**2
_coeff[z**2] = 4*A*F - C**2
_coeff[y*z] = 4*A*E - 2*B*C
_coeff[x*y] = 0
_coeff[x*z] = 0
X_0, y_0, z_0 = _diop_ternary_quadratic(var, _coeff)
if X_0 is None:
return (None, None, None)
l = (S(B*y_0 + C*z_0)/(2*A)).q
x_0, y_0, z_0 = X_0*l - (S(B*y_0 + C*z_0)/(2*A)).p, y_0*l, z_0*l
elif coeff[z*y] != 0:
if coeff[y**2] == 0:
if coeff[z**2] == 0:
# Equations of the form A*x**2 + E*yz = 0.
A = coeff[x**2]
E = coeff[y*z]
b = (S(-E)/A).p
a = (S(-E)/A).q
x_0, y_0, z_0 = b, a, b
else:
# Ax**2 + E*y*z + F*z**2 = 0
var[0], var[2] = _var[2], _var[0]
z_0, y_0, x_0 = _diop_ternary_quadratic(var, coeff)
else:
# A*x**2 + D*y**2 + E*y*z + F*z**2 = 0, C may be zero
var[0], var[1] = _var[1], _var[0]
y_0, x_0, z_0 = _diop_ternary_quadratic(var, coeff)
else:
# Ax**2 + D*y**2 + F*z**2 = 0, C may be zero
x_0, y_0, z_0 = _diop_ternary_quadratic_normal(var, coeff)
return simplified(x_0, y_0, z_0)
def transformation_to_normal(eq):
"""
Returns the transformation Matrix from general ternary quadratic equation
`eq` to normal form.
General form of the ternary quadratic equation is `ax^2 + by^2 + cz^2 + dxy +
eyz + fxz`. This function returns a 3x3 transformation Matrix which
transforms the former equation to the form `ax^2 + by^2 + cz^2 = 0`. This
is not used in solving ternary quadratics. It is implemented only for the
sake of completeness.
"""
var, coeff, diop_type = classify_diop(eq)
if diop_type == "homogeneous_ternary_quadratic":
return _transformation_to_normal(var, coeff)
def _transformation_to_normal(var, coeff):
_var = [var[0]]*3
_var[1], _var[2] = var[1], var[2]
x, y, z = var[:3]
if coeff[x**2] == 0:
# If the coefficient of x**2 is zero, change the variables
if coeff[y**2] == 0:
_var[0], _var[2] = var[2], var[0]
T = _transformation_to_normal(_var, coeff)
T.row_swap(0, 2)
T.col_swap(0, 2)
return T
else:
_var[0], _var[1] = var[1], var[0]
T = _transformation_to_normal(_var, coeff)
T.row_swap(0, 1)
T.col_swap(0, 1)
return T
else:
# Apply the transformation x --> X - (B*Y + C*Z)/(2*A)
if coeff[x*y] != 0 or coeff[x*z] != 0:
A = coeff[x**2]
B = coeff[x*y]
C = coeff[x*z]
D = coeff[y**2]
E = coeff[y*z]
F = coeff[z**2]
_coeff = dict()
_coeff[x**2] = 4*A**2
_coeff[y**2] = 4*A*D - B**2
_coeff[z**2] = 4*A*F - C**2
_coeff[y*z] = 4*A*E - 2*B*C
_coeff[x*y] = 0
_coeff[x*z] = 0
T_0 = _transformation_to_normal(_var, _coeff)
return Matrix(3, 3, [1, S(-B)/(2*A), S(-C)/(2*A), 0, 1, 0, 0, 0, 1]) * T_0
elif coeff[y*z] != 0:
if coeff[y**2] == 0:
if coeff[z**2] == 0:
# Equations of the form A*x**2 + E*yz = 0.
# Apply transformation y -> Y + Z and z -> Y - Z
return Matrix(3, 3, [1, 0, 0, 0, 1, 1, 0, 1, -1])
else:
# Ax**2 + E*y*z + F*z**2 = 0
_var[0], _var[2] = var[2], var[0]
T = _transformation_to_normal(_var, coeff)
T.row_swap(0, 2)
T.col_swap(0, 2)
return T
else:
# A*x**2 + D*y**2 + E*y*z + F*z**2 = 0, F may be zero
_var[0], _var[1] = var[1], var[0]
T = _transformation_to_normal(_var, coeff)
T.row_swap(0, 1)
T.col_swap(0, 1)
return T
else:
return Matrix(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1])
def simplified(x, y, z):
"""
Simplify the solution `(x, y, z)`.
"""
if x is None or y is None or z is None:
return (x, y, z)
g = igcd(x, igcd(y, z))
return x // g, y // g, z // g
def parametrize_ternary_quadratic(eq):
"""
Returns the parametrized general solution for the ternary quadratic
equation ``eq`` which has the form
`ax^2 + by^2 + cz^2 + fxy + gyz + hxz = 0`.
Examples
========
>>> from sympy.abc import x, y, z
>>> from sympy.solvers.diophantine import parametrize_ternary_quadratic
>>> parametrize_ternary_quadratic(x**2 + y**2 - z**2)
(2*p*q, p**2 - q**2, p**2 + q**2)
Here `p` and `q` are two co-prime integers.
>>> parametrize_ternary_quadratic(3*x**2 + 2*y**2 - z**2 - 2*x*y + 5*y*z - 7*y*z)
(2*p**2 - 2*p*q - q**2, 2*p**2 + 2*p*q - q**2, 2*p**2 - 2*p*q + 3*q**2)
>>> parametrize_ternary_quadratic(124*x**2 - 30*y**2 - 7729*z**2)
(-1410*p**2 - 363263*q**2, 2700*p**2 + 30916*p*q - 695610*q**2, -60*p**2 + 5400*p*q + 15458*q**2)
References
==========
.. [1] The algorithmic resolution of Diophantine equations, Nigel P. Smart,
London Mathematical Society Student Texts 41, Cambridge University
Press, Cambridge, 1998.
"""
var, coeff, diop_type = classify_diop(eq)
if diop_type == "homogeneous_ternary_quadratic":
x_0, y_0, z_0 = _diop_ternary_quadratic(var, coeff)
return _parametrize_ternary_quadratic((x_0, y_0, z_0), var, coeff)
def _parametrize_ternary_quadratic(solution, _var, coeff):
x, y, z = _var[:3]
x_0, y_0, z_0 = solution[:3]
v = [x]*3
v[0], v[1], v[2] = _var[0], _var[1], _var[2]
if x_0 == None:
return (None, None, None)
if x_0 == 0:
if y_0 == 0:
v[0], v[2] = v[2], v[0]
z_p, y_p, x_p = _parametrize_ternary_quadratic((z_0, y_0, x_0), v, coeff)
return x_p, y_p, z_p
else:
v[0], v[1] = v[1], v[0]
y_p, x_p, z_p = _parametrize_ternary_quadratic((y_0, x_0, z_0), v, coeff)
return x_p, y_p, z_p
x, y, z = v[:3]
r, p, q = symbols("r, p, q", integer=True)
eq = x**2*coeff[x**2] + y**2*coeff[y**2] + z**2*coeff[z**2] + x*y*coeff[x*y] + y*z*coeff[y*z] + z*x*coeff[z*x]
eq_1 = Subs(eq, (x, y, z), (r*x_0, r*y_0 + p, r*z_0 + q)).doit()
eq_1 = _mexpand(eq_1)
A, B = eq_1.as_independent(r, as_Add=True)
x = A*x_0
y = (A*y_0 - _mexpand(B/r*p))
z = (A*z_0 - _mexpand(B/r*q))
return x, y, z
def diop_ternary_quadratic_normal(eq):
"""
Solves the quadratic ternary diophantine equation,
`ax^2 + by^2 + cz^2 = 0`.
Here the coefficients `a`, `b`, and `c` should be non-zero. Otherwise the
equation will be a quadratic binary or univariate equation. If solvable,
returns a tuple `(x, y, z)` that satisfies the given equation. If the
equation does not have integer solutions, `(None, None, None)` is returned.
Usage
=====
``diop_ternary_quadratic_normal(eq)``: where ``eq`` is an equation of the form
`ax^2 + by^2 + cz^2 = 0`.
Examples
========
>>> from sympy.abc import x, y, z
>>> from sympy.solvers.diophantine import diop_ternary_quadratic_normal
>>> diop_ternary_quadratic_normal(x**2 + 3*y**2 - z**2)
(1, 0, 1)
>>> diop_ternary_quadratic_normal(4*x**2 + 5*y**2 - z**2)
(1, 0, 2)
>>> diop_ternary_quadratic_normal(34*x**2 - 3*y**2 - 301*z**2)
(4, 9, 1)
"""
var, coeff, diop_type = classify_diop(eq)
if diop_type == "homogeneous_ternary_quadratic":
return _diop_ternary_quadratic_normal(var, coeff)
def _diop_ternary_quadratic_normal(var, coeff):
x, y, z = var[:3]
a = coeff[x**2]
b = coeff[y**2]
c = coeff[z**2]
if a*b*c == 0:
raise ValueError("Try factoring out you equation or using diophantine()")
g = igcd(a, igcd(b, c))
a = a // g
b = b // g
c = c // g
a_0 = square_factor(a)
b_0 = square_factor(b)
c_0 = square_factor(c)
a_1 = a // a_0**2
b_1 = b // b_0**2
c_1 = c // c_0**2
a_2, b_2, c_2 = pairwise_prime(a_1, b_1, c_1)
A = -a_2*c_2
B = -b_2*c_2
# If either of the following two conditions is satisfied then there are no solutions
if A < 0 and B < 0:
return (None, None, None)
if (sqrt_mod(-b_2*c_2, a_2) == None or sqrt_mod(-c_2*a_2, b_2) == None or
sqrt_mod(-a_2*b_2, c_2) == None):
return (None, None, None)
z_0, x_0, y_0 = descent(A, B)
if divisible(z_0, c_2):
z_0 = z_0 // abs(c_2)
else:
x_0 = x_0*(S(z_0)/c_2).q
y_0 = y_0*(S(z_0)/c_2).q
z_0 = (S(z_0)/c_2).p
x_0, y_0, z_0 = simplified(x_0, y_0, z_0)
# Holzer reduction
if sign(a) == sign(b):
x_0, y_0, z_0 = holzer(x_0, y_0, z_0, abs(a_2), abs(b_2), abs(c_2))
elif sign(a) == sign(c):
x_0, z_0, y_0 = holzer(x_0, z_0, y_0, abs(a_2), abs(c_2), abs(b_2))
else:
y_0, z_0, x_0 = holzer(y_0, z_0, x_0, abs(b_2), abs(c_2), abs(a_2))
x_0 = reconstruct(b_1, c_1, x_0)
y_0 = reconstruct(a_1, c_1, y_0)
z_0 = reconstruct(a_1, b_1, z_0)
l = ilcm(a_0, ilcm(b_0, c_0))
x_0 = abs(x_0*l//a_0)
y_0 = abs(y_0*l//b_0)
z_0 = abs(z_0*l//c_0)
return simplified(x_0, y_0, z_0)
def square_factor(a):
"""
Returns an integer `c` s.t. `a = c^2k, \ c,k \in Z`. Here `k` is square
free.
Examples
========
>>> from sympy.solvers.diophantine import square_factor
>>> square_factor(24)
2
>>> square_factor(36)
6
>>> square_factor(1)
1
"""
f = factorint(abs(a))
c = 1
for p, e in f.items():
c = c * p**(e//2)
return c
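# Illustrative note: 24 = 2**2 * 6 with 6 square-free, so square_factor(24)
# returns 2; each prime power p**e of ``a`` contributes p**(e//2) to c in
# the loop above.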
def pairwise_prime(a, b, c):
"""
Transform `ax^2 + by^2 + cz^2 = 0` into an equivalent equation
`a'x^2 + b'y^2 + c'z^2 = 0` where `a', b', c'` are pairwise relatively
prime.
Returns a tuple containing `a', b', c'`. `\gcd(a, b, c)` should equal `1`
for this to work. The solutions for `ax^2 + by^2 + cz^2 = 0` can be
recovered from the solutions of `a'x^2 + b'y^2 + c'z^2 = 0`.
Examples
========
>>> from sympy.solvers.diophantine import pairwise_prime
>>> pairwise_prime(6, 15, 10)
(5, 2, 3)
See Also
========
make_prime(), reconstruct()
"""
a, b, c = make_prime(a, b, c)
b, c, a = make_prime(b, c, a)
c, a, b = make_prime(c, a, b)
return a, b, c
def make_prime(a, b, c):
"""
Transform the equation `ax^2 + by^2 + cz^2 = 0` to an equivalent equation
`a'x^2 + b'y^2 + c'z^2 = 0` with `\gcd(a', b') = 1`.
Returns a tuple `(a', b', c')` which satisfies above conditions. Note that
in the returned tuple `\gcd(a', c')` and `\gcd(b', c')` can take any value.
Examples
========
>>> from sympy.solvers.diophantine import make_prime
>>> make_prime(4, 2, 7)
(2, 1, 14)
See Also
========
pairwise_prime(), reconstruct()
"""
g = igcd(a, b)
if g != 1:
f = factorint(g)
for p, e in f.items():
a = a // p**e
b = b // p**e
if e % 2 == 1:
c = p*c
return a, b, c
def reconstruct(a, b, z):
"""
Reconstruct the `z` value of an equivalent solution of `ax^2 + by^2 + cz^2`
from the `z` value of a solution of a transformed version of the above
equation.
"""
g = igcd(a, b)
if g != 1:
f = factorint(g)
for p, e in f.items():
if e % 2 == 0:
z = z*p**(e//2)
else:
z = z*p**((e//2)+1)
return z
def ldescent(A, B):
"""
Uses Lagrange's method to find a non-trivial solution to
`w^2 = Ax^2 + By^2`.
Here, `A \\neq 0` and `B \\neq 0` and `A` and `B` are square-free. Outputs a
tuple `(w_0, x_0, y_0)` which is a solution to the above equation.
Examples
========
>>> from sympy.solvers.diophantine import ldescent
>>> ldescent(1, 1) # w^2 = x^2 + y^2
(1, 1, 0)
>>> ldescent(4, -7) # w^2 = 4x^2 - 7y^2
(2, -1, 0)
This means that `x = -1, y = 0` and `w = 2` is a solution to the equation
`w^2 = 4x^2 - 7y^2`
>>> ldescent(5, -1) # w^2 = 5x^2 - y^2
(2, 1, -1)
References
==========
.. [1] The algorithmic resolution of Diophantine equations, Nigel P. Smart,
London Mathematical Society Student Texts 41, Cambridge University
Press, Cambridge, 1998.
.. [2] Efficient Solution of Rational Conics, J. E. Cremona and D. Rusin,
Mathematics of Computation, Volume 00, Number 0.
"""
if abs(A) > abs(B):
w, y, x = ldescent(B, A)
return w, x, y
if A == 1:
return (S.One, S.One, 0)
if B == 1:
return (S.One, 0, S.One)
r = sqrt_mod(A, B)
Q = (r**2 - A) // B
if Q == 0:
B_0 = 1
d = 0
else:
div = divisors(Q)
B_0 = None
for i in div:
if isinstance(sqrt(abs(Q) // i), Integer):
B_0, d = sign(Q)*i, sqrt(abs(Q) // i)
break
if B_0 is not None:
W, X, Y = ldescent(A, B_0)
return simplified((-A*X + r*W), (r*X - W), Y*(B_0*d))
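# Quick sanity check of the doctest above (not part of the algorithm):
# ldescent(5, -1) == (2, 1, -1) and indeed 2**2 == 5*1**2 + (-1)*(-1)**2.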
# In this module descent() will always be called with inputs which have solutions.
def descent(A, B):
"""
Lagrange's `descent()` with lattice-reduction to find solutions to
`x^2 = Ay^2 + Bz^2`.
Here `A` and `B` should be square-free and pairwise prime. It should always
be called with suitable ``A`` and ``B`` so that the above equation has
solutions.
This is faster than the plain Lagrange descent algorithm because Gaussian
reduction is used.
Examples
========
>>> from sympy.solvers.diophantine import descent
>>> descent(3, 1) # x**2 = 3*y**2 + z**2
(1, 0, 1)
`(x, y, z) = (1, 0, 1)` is a solution to the above equation.
>>> descent(41, -113)
(-16, -3, 1)
References
==========
.. [1] Efficient Solution of Rational Conics, J. E. Cremona and D. Rusin,
Mathematics of Computation, Volume 00, Number 0.
"""
if abs(A) > abs(B):
x, y, z = descent(B, A)
return x, z, y
if B == 1:
return (1, 0, 1)
if A == 1:
return (1, 1, 0)
if B == -1:
return (None, None, None)
if B == -A:
return (0, 1, 1)
if B == A:
x, z, y = descent(-1, A)
return (A*y, z, x)
w = sqrt_mod(A, B)
x_0, z_0 = gaussian_reduce(w, A, B)
t = (x_0**2 - A*z_0**2) // B
t_2 = square_factor(t)
t_1 = t // t_2**2
x_1, z_1, y_1 = descent(A, t_1)
return simplified(x_0*x_1 + A*z_0*z_1, z_0*x_1 + x_0*z_1, t_1*t_2*y_1)
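# Sanity check of the doctest above: descent(41, -113) == (-16, -3, 1) and
# indeed (-16)**2 == 41*(-3)**2 + (-113)*1**2, i.e. 256 == 369 - 113.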
def gaussian_reduce(w, a, b):
"""
Returns a reduced solution `(x, z)` to the congruence
`X^2 - aZ^2 \equiv 0 \ (mod \ b)` so that `x^2 + |a|z^2` is minimal.
Details
=======
Here ``w`` is a solution of the congruence `x^2 \equiv a \ (mod \ b)`
References
==========
.. [1] Gaussian lattice Reduction [online]. Available:
http://home.ie.cuhk.edu.hk/~wkshum/wordpress/?p=404
.. [2] Efficient Solution of Rational Conics, J. E. Cremona and D. Rusin,
Mathematics of Computation, Volume 00, Number 0.
"""
u = (0, 1)
v = (1, 0)
if dot(u, v, w, a, b) < 0:
v = (-v[0], -v[1])
if norm(u, w, a, b) < norm(v, w, a, b):
u, v = v, u
while norm(u, w, a, b) > norm(v, w, a, b):
k = dot(u, v, w, a, b) // dot(v, v, w, a, b)
u, v = v, (u[0]- k*v[0], u[1]- k*v[1])
u, v = v, u
if dot(u, v, w, a, b) < dot(v, v, w, a, b)/2 or norm((u[0]-v[0], u[1]-v[1]), w, a, b) > norm(v, w, a, b):
c = v
else:
c = (u[0] - v[0], u[1] - v[1])
return c[0]*w + b*c[1], c[0]
def dot(u, v, w, a, b):
"""
Returns a special dot product of the vectors `u = (u_{1}, u_{2})` and
`v = (v_{1}, v_{2})` which is defined in order to reduce solutions of
the congruence `X^2 - aZ^2 \equiv 0 \ (mod \ b)`.
"""
u_1, u_2 = u[:2]
v_1, v_2 = v[:2]
return (w*u_1 + b*u_2)*(w*v_1 + b*v_2) + abs(a)*u_1*v_1
def norm(u, w, a, b):
"""
Returns the norm of the vector `u = (u_{1}, u_{2})` under the dot product
defined by `u \cdot v = (wu_{1} + bu_{2})(wv_{1} + bv_{2}) + |a|u_{1}v_{1}`
where `u = (u_{1}, u_{2})` and `v = (v_{1}, v_{2})`.
"""
u_1, u_2 = u[:2]
return sqrt(dot((u_1, u_2), (u_1, u_2), w, a, b))
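# For the standard basis this dot product gives norm((1, 0), w, a, b) ==
# sqrt(w**2 + abs(a)) and norm((0, 1), w, a, b) == abs(b), which is how the
# vectors u and v in gaussian_reduce() are compared.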
def holzer(x_0, y_0, z_0, a, b, c):
"""
Simplify the solution `(x_{0}, y_{0}, z_{0})` of the equation
`ax^2 + by^2 = cz^2` with `a, b, c > 0` and `z_{0}^2 \geq \mid ab \mid` to
a new reduced solution `(x, y, z)` such that `z^2 \leq \mid ab \mid`.
"""
while z_0 > sqrt(a*b):
if c % 2 == 0:
k = c // 2
u_0, v_0 = base_solution_linear(k, y_0, -x_0)
else:
k = 2*c
u_0, v_0 = base_solution_linear(c, y_0, -x_0)
w = -(a*u_0*x_0 + b*v_0*y_0) // (c*z_0)
if c % 2 == 1:
if w % 2 != (a*u_0 + b*v_0) % 2:
w = w + 1
x = (x_0*(a*u_0**2 + b*v_0**2 + c*w**2) - 2*u_0*(a*u_0*x_0 + b*v_0*y_0 + c*w*z_0)) // k
y = (y_0*(a*u_0**2 + b*v_0**2 + c*w**2) - 2*v_0*(a*u_0*x_0 + b*v_0*y_0 + c*w*z_0)) // k
z = (z_0*(a*u_0**2 + b*v_0**2 + c*w**2) - 2*w*(a*u_0*x_0 + b*v_0*y_0 + c*w*z_0)) // k
x_0, y_0, z_0 = x, y, z
return x_0, y_0, z_0
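# Note: each pass of the loop above replaces (x_0, y_0, z_0) with another
# solution of a*x**2 + b*y**2 == c*z**2 having smaller z_0, so the loop
# terminates with z_0**2 <= a*b, as stated in the docstring.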
def diop_general_pythagorean(eq, param=symbols("m", integer=True)):
"""
Solves the general pythagorean equation,
`a_{1}^2x_{1}^2 + a_{2}^2x_{2}^2 + . . . + a_{n}^2x_{n}^2 - a_{n + 1}^2x_{n + 1}^2 = 0`.
Returns a tuple which contains a parametrized solution to the equation,
sorted in the same order as the input variables.
Usage
=====
``diop_general_pythagorean(eq, param)``: where ``eq`` is a general
pythagorean equation which is assumed to be zero and ``param`` is the base
parameter used to construct other parameters by subscripting.
Examples
========
>>> from sympy.solvers.diophantine import diop_general_pythagorean
>>> from sympy.abc import a, b, c, d, e
>>> diop_general_pythagorean(a**2 + b**2 + c**2 - d**2)
(m1**2 + m2**2 - m3**2, 2*m1*m3, 2*m2*m3, m1**2 + m2**2 + m3**2)
>>> diop_general_pythagorean(9*a**2 - 4*b**2 + 16*c**2 + 25*d**2 + e**2)
(10*m1**2 + 10*m2**2 + 10*m3**2 - 10*m4**2, 15*m1**2 + 15*m2**2 + 15*m3**2 + 15*m4**2, 15*m1*m4, 12*m2*m4, 60*m3*m4)
"""
var, coeff, diop_type = classify_diop(eq)
if diop_type == "general_pythagorean":
return _diop_general_pythagorean(var, coeff, param)
def _diop_general_pythagorean(var, coeff, t):
if sign(coeff[var[0]**2]) + sign(coeff[var[1]**2]) + sign(coeff[var[2]**2]) < 0:
for key in coeff.keys():
coeff[key] = coeff[key] * -1
n = len(var)
index = 0
for i, v in enumerate(var):
if sign(coeff[v**2]) == -1:
index = i
m = symbols(str(t) + "1:" + str(n), integer=True)
l = []
ith = 0
for m_i in m:
ith = ith + m_i**2
l.append(ith - 2*m[n - 2]**2)
for i in range(n - 2):
l.append(2*m[i]*m[n-2])
sol = l[:index] + [ith] + l[index:]
lcm = 1
for i, v in enumerate(var):
if i == index or (index > 0 and i == 0) or (index == 0 and i == 1):
lcm = ilcm(lcm, sqrt(abs(coeff[v**2])))
else:
lcm = ilcm(lcm, sqrt(coeff[v**2]) if sqrt(coeff[v**2]) % 2 else sqrt(coeff[v**2]) // 2)
for i, v in enumerate(var):
sol[i] = (lcm*sol[i]) / sqrt(abs(coeff[v**2]))
return tuple(sol)
def diop_general_sum_of_squares(eq, limit=1):
"""
Solves the equation `x_{1}^2 + x_{2}^2 + . . . + x_{n}^2 - k = 0`.
Returns at most ``limit`` solutions. Currently there is no way to
set ``limit`` using higher level APIs like ``diophantine()`` or
``diop_solve()``, but that will be fixed soon.
Usage
=====
``diop_general_sum_of_squares(eq, limit)``: Here ``eq`` is an expression which
is assumed to be zero. Also, ``eq`` should be in the form,
`x_{1}^2 + x_{2}^2 + . . . + x_{n}^2 - k = 0`. At most ``limit`` number of
solutions are returned.
Details
=======
When `n = 3`, if `k = 4^a(8m + 7)` for some `a, m \in Z` then there will be
no solutions. Refer to [1]_ for more details.
Examples
========
>>> from sympy.solvers.diophantine import diop_general_sum_of_squares
>>> from sympy.abc import a, b, c, d, e, f
>>> diop_general_sum_of_squares(a**2 + b**2 + c**2 + d**2 + e**2 - 2345)
set([(0, 48, 5, 4, 0)])
References
==========
.. [1] Representing an Integer as a sum of three squares, [online],
Available:
http://www.proofwiki.org/wiki/Integer_as_Sum_of_Three_Squares
"""
var, coeff, diop_type = classify_diop(eq)
if diop_type == "general_sum_of_squares":
return _diop_general_sum_of_squares(var, coeff, limit)
def _diop_general_sum_of_squares(var, coeff, limit=1):
n = len(var)
k = -int(coeff[Integer(1)])
s = set([])
if k < 0:
return set([])
if n == 3:
s.add(sum_of_three_squares(k))
elif n == 4:
s.add(sum_of_four_squares(k))
else:
m = n // 4
f = partition(k, m, True)
for j in range(limit):
soln = []
try:
l = next(f)
except StopIteration:
break
for n_i in l:
a, b, c, d = sum_of_four_squares(n_i)
soln = soln + [a, b, c, d]
soln = soln + [0] * (n % 4)
s.add(tuple(soln))
return s
## Functions below this comment can be more suitably grouped under an Additive number theory module
## rather than the Diophantine equation module.
def partition(n, k=None, zeros=False):
"""
Returns a generator that can be used to generate partitions of an integer
`n`.
A partition of `n` is a collection of positive integers which add up to `n`.
For example, the partitions of 3 are 3, 1 + 2, and 1 + 1 + 1. A partition is
returned as a tuple. If ``k`` equals None, then all possible partitions are
returned irrespective of their size, otherwise only the partitions of size
``k`` are returned. If there are no partitions of `n` with size `k` then an
empty tuple is returned. If the ``zeros`` parameter is set to True then a
suitable number of zeros are added at the end of every partition of size
less than ``k``.
The ``zeros`` parameter is considered only if ``k`` is not None. When the
partitions are exhausted, the last `next()` call raises ``StopIteration``,
so this function should always be used inside a try-except block.
Details
=======
``partition(n, k)``: Here ``n`` is a positive integer and ``k`` is the size
of the partition, which is also a positive integer.
Examples
========
>>> from sympy.solvers.diophantine import partition
>>> f = partition(5)
>>> next(f)
(1, 1, 1, 1, 1)
>>> next(f)
(1, 1, 1, 2)
>>> g = partition(5, 3)
>>> next(g)
(3, 1, 1)
>>> next(g)
(2, 2, 1)
References
==========
.. [1] Generating Integer Partitions, [online],
Available: http://jeromekelleher.net/partitions.php
"""
if n < 1:
yield tuple()
if k is not None:
if k < 1:
yield tuple()
elif k > n:
if zeros:
for i in range(1, n):
for t in partition(n, i):
yield (t,) + (0,) * (k - i)
else:
yield tuple()
else:
a = [1 for i in range(k)]
a[0] = n - k + 1
yield tuple(a)
i = 1
while a[0] >= n // k + 1:
j = 0
while j < i and j + 1 < k:
a[j] = a[j] - 1
a[j + 1] = a[j + 1] + 1
yield tuple(a)
j = j + 1
i = i + 1
if zeros:
for m in range(1, k):
for a in partition(n, m):
yield tuple(a) + (0,) * (k - m)
else:
a = [0 for i in range(n + 1)]
l = 1
y = n - 1
while l != 0:
x = a[l - 1] + 1
l -= 1
while 2*x <= y:
a[l] = x
y -= x
l += 1
m = l + 1
while x <= y:
a[l] = x
a[m] = y
yield tuple(a[:l + 2])
x += 1
y -= 1
a[l] = x + y
y = x + y - 1
yield tuple(a[:l + 1])
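# Illustrative usage (a minimal sketch; as noted in the docstring, the
# generator raises StopIteration when exhausted):
#
# g = partition(4, 2)
# parts = []
# try:
#     while True:
#         parts.append(next(g))
# except StopIteration:
#     pass
# # parts is now [(3, 1), (2, 2)]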
def prime_as_sum_of_two_squares(p):
"""
Represent a prime `p`, which is congruent to 1 mod 4, as a sum of two
squares.
Examples
========
>>> from sympy.solvers.diophantine import prime_as_sum_of_two_squares
>>> prime_as_sum_of_two_squares(5)
(2, 1)
References
==========
.. [1] Representing a number as a sum of four squares, [online],
Available: http://www.schorn.ch/howto.html
"""
if p % 8 == 5:
b = 2
else:
b = 3
while pow(b, (p - 1) // 2, p) == 1:
b = nextprime(b)
b = pow(b, (p - 1) // 4, p)
a = p
while b**2 > p:
a, b = b, a % b
return (b, a % b)
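# For example, 13 is a prime congruent to 1 mod 4, and the descent above
# gives prime_as_sum_of_two_squares(13) == (3, 2), with 3**2 + 2**2 == 13.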
def sum_of_three_squares(n):
"""
Returns a 3-tuple `(a, b, c)` such that `a^2 + b^2 + c^2 = n` and
`a, b, c \geq 0`.
Returns (None, None, None) if `n = 4^a(8m + 7)` for some `a, m \in Z`. See
[1]_ for more details.
Usage
=====
``sum_of_three_squares(n)``: Here ``n`` is a non-negative integer.
Examples
========
>>> from sympy.solvers.diophantine import sum_of_three_squares
>>> sum_of_three_squares(44542)
(207, 37, 18)
References
==========
.. [1] Representing a number as a sum of three squares, [online],
Available: http://www.schorn.ch/howto.html
"""
special = {1:(1, 0, 0), 2:(1, 1, 0), 3:(1, 1, 1), 10: (1, 3, 0), 34: (3, 3, 4), 58:(3, 7, 0),
85:(6, 7, 0), 130:(3, 11, 0), 214:(3, 6, 13), 226:(8, 9, 9), 370:(8, 9, 15),
526:(6, 7, 21), 706:(15, 15, 16), 730:(1, 27, 0), 1414:(6, 17, 33), 1906:(13, 21, 36),
2986: (21, 32, 39), 9634: (56, 57, 57)}
v = 0
if n == 0:
return (0, 0, 0)
while n % 4 == 0:
v = v + 1
n = n // 4
if n % 8 == 7:
return (None, None, None)
if n in special.keys():
x, y, z = special[n]
return (2**v*x, 2**v*y, 2**v*z)
l = int(sqrt(n))
if n == l**2:
return (2**v*l, 0, 0)
x = None
if n % 8 == 3:
l = l if l % 2 else l - 1
for i in range(l, -1, -2):
if isprime((n - i**2) // 2):
x = i
break
y, z = prime_as_sum_of_two_squares((n - x**2) // 2)
return (2**v*x, 2**v*(y + z), 2**v*abs(y - z))
if n % 8 == 2 or n % 8 == 6:
l = l if l % 2 else l - 1
else:
l = l - 1 if l % 2 else l
for i in range(l, -1, -2):
if isprime(n - i**2):
x = i
break
y, z = prime_as_sum_of_two_squares(n - x**2)
return (2**v*x, 2**v*y, 2**v*z)
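# For instance, by the 4**a*(8*m + 7) criterion above, sum_of_three_squares(7)
# returns (None, None, None), while sum_of_three_squares(6) == (1, 2, 1)
# since 1 + 4 + 1 == 6.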
def sum_of_four_squares(n):
"""
Returns a 4-tuple `(a, b, c, d)` such that `a^2 + b^2 + c^2 + d^2 = n`.
Here `a, b, c, d \geq 0`.
Usage
=====
``sum_of_four_squares(n)``: Here ``n`` is a non-negative integer.
Examples
========
>>> from sympy.solvers.diophantine import sum_of_four_squares
>>> sum_of_four_squares(3456)
(8, 48, 32, 8)
>>> sum_of_four_squares(1294585930293)
(0, 1137796, 2161, 1234)
References
==========
.. [1] Representing a number as a sum of four squares, [online],
Available: http://www.schorn.ch/howto.html
"""
if n == 0:
return (0, 0, 0, 0)
v = 0
while n % 4 == 0:
v = v + 1
n = n // 4
if n % 8 == 7:
d = 2
n = n - 4
elif n % 8 == 6 or n % 8 == 2:
d = 1
n = n - 1
else:
d = 0
x, y, z = sum_of_three_squares(n)
return (2**v*d, 2**v*x, 2**v*y, 2**v*z)
def power_representation(n, p, k, zeros=False):
"""
Returns a generator for finding k-tuples `(n_{1}, n_{2}, . . . n_{k})` such
that `n = n_{1}^p + n_{2}^p + . . . n_{k}^p`.
Here `n` is a positive integer. A ``StopIteration`` exception is raised after
all the solutions are generated, so this should always be used within a
try-except block.
Usage
=====
``power_representation(n, p, k, zeros)``: Represent number ``n`` as a sum
of ``k``, ``p``th powers. If ``zeros`` is true, then the solutions will
contain zeros.
Examples
========
>>> from sympy.solvers.diophantine import power_representation
>>> f = power_representation(1729, 3, 2) # Represent 1729 as a sum of two cubes
>>> next(f)
(12, 1)
>>> next(f)
(10, 9)
"""
if p < 1 or k < 1 or n < 1:
raise ValueError("Expected: n > 0 and k >= 1 and p >= 1")
if k == 1:
if perfect_power(n):
yield (perfect_power(n)[0],)
else:
yield tuple()
elif p == 1:
for t in partition(n, k, zeros):
yield t
else:
l = []
a = integer_nthroot(n, p)[0]
for t in pow_rep_recursive(a, k, n, [], p):
yield t
if zeros:
for i in range(2, k):
for t in pow_rep_recursive(a, i, n, [], p):
yield t + (0,) * (k - i)
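# Illustrative check: power_representation(10, 2, 2) first yields (3, 1),
# since 3**2 + 1**2 == 10; subsequent next() calls eventually raise
# StopIteration as described above.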
def pow_rep_recursive(n_i, k, n_remaining, terms, p):
if k == 0 and n_remaining == 0:
yield tuple(terms)
else:
if n_i >= 1 and k > 0 and n_remaining >= 0:
if n_i**p <= n_remaining:
for t in pow_rep_recursive(n_i, k - 1, n_remaining - n_i**p, terms + [n_i], p):
yield t
for t in pow_rep_recursive(n_i - 1, k, n_remaining, terms, p):
yield t
| bsd-3-clause |
yusiwen/offlineimap | offlineimap/syncmaster.py | 22 | 1711 | # OfflineIMAP synchronization master code
# Copyright (C) 2002-2007 John Goerzen
# <jgoerzen@complete.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from offlineimap.threadutil import threadlist, InstanceLimitedThread
from offlineimap.accounts import SyncableAccount
from threading import currentThread
def syncaccount(threads, config, accountname):
account = SyncableAccount(config, accountname)
thread = InstanceLimitedThread(instancename = 'ACCOUNTLIMIT',
target = account.syncrunner,
name = "Account sync %s" % accountname)
thread.setDaemon(True)
thread.start()
threads.add(thread)
def syncitall(accounts, config):
# Special exit message for SyncRunner thread, so main thread can exit
currentThread().exit_message = 'SYNCRUNNER_EXITED_NORMALLY'
threads = threadlist()
for accountname in accounts:
syncaccount(threads, config, accountname)
# Wait for the threads to finish.
threads.reset()
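# Illustrative usage (a sketch; assumes ``config`` is an already-parsed
# OfflineIMAP configuration object and that the named accounts exist in it):
# syncitall(['personal', 'work'], config)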
| gpl-2.0 |
joshimoo/Algorithm-Implementations | Fibonacci_series/Python/jcla1/fib.py | 27 | 1515 | # This is a recursive implementation of the fibonacci series.
# This is the most natural representation of the series
# The recurrence relation is quite simple:
# f(0) = 0
# f(1) = 1
# f(n) = f(n-1) + f(n-2)
# There is no point in trying to implement
# the fibonacci series in a tail-call recursive manner
# since the BDFL has stated multiple times that Python
# will not support tail-call optimisation (and it would
# just over-complicate things)
def fib_rec(n):
if n < 2:
return n
else:
return fib_rec(n-1) + fib_rec(n-2)
# This implementation is a generator
# You call the function and get the generator back
# For getting values you can loop through the generator,
# just like with any other iterable
def fib():
a, b = 0, 1
while True:
# This next line makes this function a generator
# Each time the executor reaches this yield stmt
# it halts execution and gives (yields) the values
yield a
# This makes use of Python's tuple
# unpacking feature, which makes it
# easy to swap variables (or in this
# case update them simultaneously)
# without another temporary variable
a, b = b, a+b
# This function gives you the kth fibonacci
# term. It works just the same as the above
# implementation, with the exception that
# you only get one fibonacci value (the one-th
# you give as an argument)
def kth_fib(k):
a, b = 0, 1
while k != 0:
a, b = b, a+b
k -= 1
return a
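# Illustrative usage of the three implementations above (a minimal sketch):
# fib_rec(10) -> 55
# [kth_fib(i) for i in range(8)] -> [0, 1, 1, 2, 3, 5, 8, 13]
# g = fib(); [next(g) for _ in range(6)] -> [0, 1, 1, 2, 3, 5]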
| mit |
jmaurice/letsencrypt | acme/acme/jose/jwk_test.py | 31 | 5436 | """Tests for acme.jose.jwk."""
import unittest
from acme import test_util
from acme.jose import errors
from acme.jose import json_util
from acme.jose import util
DSA_PEM = test_util.load_vector('dsa512_key.pem')
RSA256_KEY = test_util.load_rsa_private_key('rsa256_key.pem')
RSA512_KEY = test_util.load_rsa_private_key('rsa512_key.pem')
class JWKTest(unittest.TestCase):
"""Tests for acme.jose.jwk.JWK."""
def test_load(self):
from acme.jose.jwk import JWK
self.assertRaises(errors.Error, JWK.load, DSA_PEM)
def test_load_subclass_wrong_type(self):
from acme.jose.jwk import JWKRSA
self.assertRaises(errors.Error, JWKRSA.load, DSA_PEM)
class JWKOctTest(unittest.TestCase):
"""Tests for acme.jose.jwk.JWKOct."""
def setUp(self):
from acme.jose.jwk import JWKOct
self.jwk = JWKOct(key=b'foo')
self.jobj = {'kty': 'oct', 'k': json_util.encode_b64jose(b'foo')}
def test_to_partial_json(self):
self.assertEqual(self.jwk.to_partial_json(), self.jobj)
def test_from_json(self):
from acme.jose.jwk import JWKOct
self.assertEqual(self.jwk, JWKOct.from_json(self.jobj))
def test_from_json_hashable(self):
from acme.jose.jwk import JWKOct
hash(JWKOct.from_json(self.jobj))
def test_load(self):
from acme.jose.jwk import JWKOct
self.assertEqual(self.jwk, JWKOct.load(b'foo'))
def test_public_key(self):
self.assertTrue(self.jwk.public_key() is self.jwk)
class JWKRSATest(unittest.TestCase):
"""Tests for acme.jose.jwk.JWKRSA."""
# pylint: disable=too-many-instance-attributes
def setUp(self):
from acme.jose.jwk import JWKRSA
self.jwk256 = JWKRSA(key=RSA256_KEY.public_key())
self.jwk256json = {
'kty': 'RSA',
'e': 'AQAB',
'n': 'm2Fylv-Uz7trgTW8EBHP3FQSMeZs2GNQ6VRo1sIVJEk',
}
# pylint: disable=protected-access
self.jwk256_not_comparable = JWKRSA(
key=RSA256_KEY.public_key()._wrapped)
self.jwk512 = JWKRSA(key=RSA512_KEY.public_key())
self.jwk512json = {
'kty': 'RSA',
'e': 'AQAB',
'n': 'rHVztFHtH92ucFJD_N_HW9AsdRsUuHUBBBDlHwNlRd3fp5'
'80rv2-6QWE30cWgdmJS86ObRz6lUTor4R0T-3C5Q',
}
self.private = JWKRSA(key=RSA256_KEY)
self.private_json_small = self.jwk256json.copy()
self.private_json_small['d'] = (
'lPQED_EPTV0UIBfNI3KP2d9Jlrc2mrMllmf946bu-CE')
self.private_json = self.jwk256json.copy()
self.private_json.update({
'd': 'lPQED_EPTV0UIBfNI3KP2d9Jlrc2mrMllmf946bu-CE',
'p': 'zUVNZn4lLLBD1R6NE8TKNQ',
'q': 'wcfKfc7kl5jfqXArCRSURQ',
'dp': 'CWJFq43QvT5Bm5iN8n1okQ',
'dq': 'bHh2u7etM8LKKCF2pY2UdQ',
'qi': 'oi45cEkbVoJjAbnQpFY87Q',
})
def test_init_auto_comparable(self):
self.assertTrue(isinstance(
self.jwk256_not_comparable.key, util.ComparableRSAKey))
self.assertEqual(self.jwk256, self.jwk256_not_comparable)
def test_encode_param_zero(self):
from acme.jose.jwk import JWKRSA
# pylint: disable=protected-access
# TODO: move encode/decode _param to separate class
self.assertEqual('AA', JWKRSA._encode_param(0))
def test_equals(self):
self.assertEqual(self.jwk256, self.jwk256)
self.assertEqual(self.jwk512, self.jwk512)
def test_not_equals(self):
self.assertNotEqual(self.jwk256, self.jwk512)
self.assertNotEqual(self.jwk512, self.jwk256)
def test_load(self):
from acme.jose.jwk import JWKRSA
self.assertEqual(self.private, JWKRSA.load(
test_util.load_vector('rsa256_key.pem')))
def test_public_key(self):
self.assertEqual(self.jwk256, self.private.public_key())
def test_to_partial_json(self):
self.assertEqual(self.jwk256.to_partial_json(), self.jwk256json)
self.assertEqual(self.jwk512.to_partial_json(), self.jwk512json)
self.assertEqual(self.private.to_partial_json(), self.private_json)
def test_from_json(self):
from acme.jose.jwk import JWK
self.assertEqual(
self.jwk256, JWK.from_json(self.jwk256json))
self.assertEqual(
self.jwk512, JWK.from_json(self.jwk512json))
self.assertEqual(self.private, JWK.from_json(self.private_json))
def test_from_json_private_small(self):
from acme.jose.jwk import JWK
self.assertEqual(self.private, JWK.from_json(self.private_json_small))
def test_from_json_missing_one_additional(self):
from acme.jose.jwk import JWK
del self.private_json['q']
self.assertRaises(errors.Error, JWK.from_json, self.private_json)
def test_from_json_hashable(self):
from acme.jose.jwk import JWK
hash(JWK.from_json(self.jwk256json))
def test_from_json_non_schema_errors(self):
# valid against schema, but still failing
from acme.jose.jwk import JWK
self.assertRaises(errors.DeserializationError, JWK.from_json,
{'kty': 'RSA', 'e': 'AQAB', 'n': ''})
self.assertRaises(errors.DeserializationError, JWK.from_json,
{'kty': 'RSA', 'e': 'AQAB', 'n': '1'})
if __name__ == '__main__':
unittest.main() # pragma: no cover
| apache-2.0 |
naderm/cs188 | p3/reinforcement/graphicsDisplay.py | 6 | 28082 | # graphicsDisplay.py
# ------------------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to
# http://inst.eecs.berkeley.edu/~cs188/pacman/pacman.html
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel (pabbeel@cs.berkeley.edu).
from graphicsUtils import *
import math, time
from game import Directions
###########################
# GRAPHICS DISPLAY CODE #
###########################
# Most code by Dan Klein and John Denero written or rewritten for cs188, UC Berkeley.
# Some code from a Pacman implementation by LiveWires, and used / modified with permission.
DEFAULT_GRID_SIZE = 30.0
INFO_PANE_HEIGHT = 35
BACKGROUND_COLOR = formatColor(0,0,0)
WALL_COLOR = formatColor(0.0/255.0, 51.0/255.0, 255.0/255.0)
INFO_PANE_COLOR = formatColor(.4,.4,0)
SCORE_COLOR = formatColor(.9, .9, .9)
PACMAN_OUTLINE_WIDTH = 2
PACMAN_CAPTURE_OUTLINE_WIDTH = 4
GHOST_COLORS = []
GHOST_COLORS.append(formatColor(.9,0,0)) # Red
GHOST_COLORS.append(formatColor(0,.3,.9)) # Blue
GHOST_COLORS.append(formatColor(.98,.41,.07)) # Orange
GHOST_COLORS.append(formatColor(.1,.75,.7)) # Green
GHOST_COLORS.append(formatColor(1.0,0.6,0.0)) # Yellow
GHOST_COLORS.append(formatColor(.4,0.13,0.91)) # Purple
TEAM_COLORS = GHOST_COLORS[:2]
GHOST_SHAPE = [
( 0, 0.3 ),
( 0.25, 0.75 ),
( 0.5, 0.3 ),
( 0.75, 0.75 ),
( 0.75, -0.5 ),
( 0.5, -0.75 ),
(-0.5, -0.75 ),
(-0.75, -0.5 ),
(-0.75, 0.75 ),
(-0.5, 0.3 ),
(-0.25, 0.75 )
]
GHOST_SIZE = 0.65
SCARED_COLOR = formatColor(1,1,1)
GHOST_VEC_COLORS = map(colorToVector, GHOST_COLORS)
PACMAN_COLOR = formatColor(255.0/255.0,255.0/255.0,61.0/255)
PACMAN_SCALE = 0.5
#pacman_speed = 0.25
# Food
FOOD_COLOR = formatColor(1,1,1)
FOOD_SIZE = 0.1
# Laser
LASER_COLOR = formatColor(1,0,0)
LASER_SIZE = 0.02
# Capsule graphics
CAPSULE_COLOR = formatColor(1,1,1)
CAPSULE_SIZE = 0.25
# Drawing walls
WALL_RADIUS = 0.15
class InfoPane:
def __init__(self, layout, gridSize):
self.gridSize = gridSize
self.width = (layout.width) * gridSize
self.base = (layout.height + 1) * gridSize
self.height = INFO_PANE_HEIGHT
self.fontSize = 24
self.textColor = PACMAN_COLOR
self.drawPane()
def toScreen(self, pos, y = None):
"""
Translates a point relative from the bottom left of the info pane.
"""
if y == None:
x,y = pos
else:
x = pos
x = self.gridSize + x # Margin
y = self.base + y
return x,y
def drawPane(self):
self.scoreText = text( self.toScreen(0, 0 ), self.textColor, "SCORE: 0", "Times", self.fontSize, "bold")
def initializeGhostDistances(self, distances):
self.ghostDistanceText = []
size = 20
if self.width < 240:
size = 12
if self.width < 160:
size = 10
for i, d in enumerate(distances):
t = text( self.toScreen(self.width/2 + self.width/8 * i, 0), GHOST_COLORS[i+1], d, "Times", size, "bold")
self.ghostDistanceText.append(t)
def updateScore(self, score):
changeText(self.scoreText, "SCORE: % 4d" % score)
def setTeam(self, isBlue):
text = "RED TEAM"
if isBlue: text = "BLUE TEAM"
self.teamText = text( self.toScreen(300, 0 ), self.textColor, text, "Times", self.fontSize, "bold")
def updateGhostDistances(self, distances):
if len(distances) == 0: return
if 'ghostDistanceText' not in dir(self): self.initializeGhostDistances(distances)
else:
for i, d in enumerate(distances):
changeText(self.ghostDistanceText[i], d)
def drawGhost(self):
pass
def drawPacman(self):
pass
def drawWarning(self):
pass
def clearIcon(self):
pass
def updateMessage(self, message):
pass
def clearMessage(self):
pass
class PacmanGraphics:
def __init__(self, zoom=1.0, frameTime=0.0, capture=False):
self.have_window = 0
self.currentGhostImages = {}
self.pacmanImage = None
self.zoom = zoom
self.gridSize = DEFAULT_GRID_SIZE * zoom
self.capture = capture
self.frameTime = frameTime
def checkNullDisplay(self):
return False
def initialize(self, state, isBlue = False):
self.isBlue = isBlue
self.startGraphics(state)
# self.drawDistributions(state)
self.distributionImages = None # Initialized lazily
self.drawStaticObjects(state)
self.drawAgentObjects(state)
# Information
self.previousState = state
def startGraphics(self, state):
self.layout = state.layout
layout = self.layout
self.width = layout.width
self.height = layout.height
self.make_window(self.width, self.height)
self.infoPane = InfoPane(layout, self.gridSize)
self.currentState = layout
def drawDistributions(self, state):
walls = state.layout.walls
dist = []
for x in range(walls.width):
distx = []
dist.append(distx)
for y in range(walls.height):
( screen_x, screen_y ) = self.to_screen( (x, y) )
block = square( (screen_x, screen_y),
0.5 * self.gridSize,
color = BACKGROUND_COLOR,
filled = 1, behind=2)
distx.append(block)
self.distributionImages = dist
def drawStaticObjects(self, state):
layout = self.layout
self.drawWalls(layout.walls)
self.food = self.drawFood(layout.food)
self.capsules = self.drawCapsules(layout.capsules)
refresh()
def drawAgentObjects(self, state):
self.agentImages = [] # (agentState, image)
for index, agent in enumerate(state.agentStates):
if agent.isPacman:
image = self.drawPacman(agent, index)
self.agentImages.append( (agent, image) )
else:
image = self.drawGhost(agent, index)
self.agentImages.append( (agent, image) )
refresh()
def swapImages(self, agentIndex, newState):
"""
Changes an image from a ghost to a pacman or vice versa (for capture)
"""
prevState, prevImage = self.agentImages[agentIndex]
for item in prevImage: remove_from_screen(item)
if newState.isPacman:
image = self.drawPacman(newState, agentIndex)
self.agentImages[agentIndex] = (newState, image )
else:
image = self.drawGhost(newState, agentIndex)
self.agentImages[agentIndex] = (newState, image )
refresh()
def update(self, newState):
agentIndex = newState._agentMoved
agentState = newState.agentStates[agentIndex]
if self.agentImages[agentIndex][0].isPacman != agentState.isPacman: self.swapImages(agentIndex, agentState)
prevState, prevImage = self.agentImages[agentIndex]
if agentState.isPacman:
self.animatePacman(agentState, prevState, prevImage)
else:
self.moveGhost(agentState, agentIndex, prevState, prevImage)
self.agentImages[agentIndex] = (agentState, prevImage)
if newState._foodEaten != None:
self.removeFood(newState._foodEaten, self.food)
if newState._capsuleEaten != None:
self.removeCapsule(newState._capsuleEaten, self.capsules)
self.infoPane.updateScore(newState.score)
if 'ghostDistances' in dir(newState):
self.infoPane.updateGhostDistances(newState.ghostDistances)
def make_window(self, width, height):
grid_width = (width-1) * self.gridSize
grid_height = (height-1) * self.gridSize
screen_width = 2*self.gridSize + grid_width
screen_height = 2*self.gridSize + grid_height + INFO_PANE_HEIGHT
begin_graphics(screen_width,
screen_height,
BACKGROUND_COLOR,
"CS188 Pacman")
def drawPacman(self, pacman, index):
position = self.getPosition(pacman)
screen_point = self.to_screen(position)
endpoints = self.getEndpoints(self.getDirection(pacman))
width = PACMAN_OUTLINE_WIDTH
outlineColor = PACMAN_COLOR
fillColor = PACMAN_COLOR
if self.capture:
outlineColor = TEAM_COLORS[index % 2]
fillColor = GHOST_COLORS[index]
width = PACMAN_CAPTURE_OUTLINE_WIDTH
return [circle(screen_point, PACMAN_SCALE * self.gridSize,
fillColor = fillColor, outlineColor = outlineColor,
endpoints = endpoints,
width = width)]
def getEndpoints(self, direction, position=(0,0)):
x, y = position
pos = x - int(x) + y - int(y)
width = 30 + 80 * math.sin(math.pi* pos)
delta = width / 2
if (direction == 'West'):
endpoints = (180+delta, 180-delta)
elif (direction == 'North'):
endpoints = (90+delta, 90-delta)
elif (direction == 'South'):
endpoints = (270+delta, 270-delta)
else:
endpoints = (0+delta, 0-delta)
return endpoints
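# Note: the endpoints above are the start/stop angles (in degrees) of the
# arc drawn for Pacman's body; the mouth opening varies sinusoidally with
# the fractional grid position, which produces the chomping animation.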
def movePacman(self, position, direction, image):
screenPosition = self.to_screen(position)
endpoints = self.getEndpoints( direction, position )
r = PACMAN_SCALE * self.gridSize
moveCircle(image[0], screenPosition, r, endpoints)
refresh()
def animatePacman(self, pacman, prevPacman, image):
if self.frameTime < 0:
print 'Press any key to step forward, "q" to play'
keys = wait_for_keys()
if 'q' in keys:
self.frameTime = 0.1
if self.frameTime > 0.01 or self.frameTime < 0:
start = time.time()
fx, fy = self.getPosition(prevPacman)
px, py = self.getPosition(pacman)
frames = 4.0
for i in range(1,int(frames) + 1):
pos = px*i/frames + fx*(frames-i)/frames, py*i/frames + fy*(frames-i)/frames
self.movePacman(pos, self.getDirection(pacman), image)
refresh()
sleep(abs(self.frameTime) / frames)
else:
self.movePacman(self.getPosition(pacman), self.getDirection(pacman), image)
refresh()
def getGhostColor(self, ghost, ghostIndex):
if ghost.scaredTimer > 0:
return SCARED_COLOR
else:
return GHOST_COLORS[ghostIndex]
def drawGhost(self, ghost, agentIndex):
pos = self.getPosition(ghost)
dir = self.getDirection(ghost)
(screen_x, screen_y) = (self.to_screen(pos) )
coords = []
for (x, y) in GHOST_SHAPE:
coords.append((x*self.gridSize*GHOST_SIZE + screen_x, y*self.gridSize*GHOST_SIZE + screen_y))
colour = self.getGhostColor(ghost, agentIndex)
body = polygon(coords, colour, filled = 1)
WHITE = formatColor(1.0, 1.0, 1.0)
BLACK = formatColor(0.0, 0.0, 0.0)
dx = 0
dy = 0
if dir == 'North':
dy = -0.2
if dir == 'South':
dy = 0.2
if dir == 'East':
dx = 0.2
if dir == 'West':
dx = -0.2
leftEye = circle((screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2, WHITE, WHITE)
rightEye = circle((screen_x+self.gridSize*GHOST_SIZE*(0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2, WHITE, WHITE)
leftPupil = circle((screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08, BLACK, BLACK)
rightPupil = circle((screen_x+self.gridSize*GHOST_SIZE*(0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08, BLACK, BLACK)
ghostImageParts = []
ghostImageParts.append(body)
ghostImageParts.append(leftEye)
ghostImageParts.append(rightEye)
ghostImageParts.append(leftPupil)
ghostImageParts.append(rightPupil)
return ghostImageParts
def moveEyes(self, pos, dir, eyes):
(screen_x, screen_y) = (self.to_screen(pos) )
dx = 0
dy = 0
if dir == 'North':
dy = -0.2
if dir == 'South':
dy = 0.2
if dir == 'East':
dx = 0.2
if dir == 'West':
dx = -0.2
moveCircle(eyes[0],(screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2)
moveCircle(eyes[1],(screen_x+self.gridSize*GHOST_SIZE*(0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2)
moveCircle(eyes[2],(screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08)
moveCircle(eyes[3],(screen_x+self.gridSize*GHOST_SIZE*(0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08)
def moveGhost(self, ghost, ghostIndex, prevGhost, ghostImageParts):
old_x, old_y = self.to_screen(self.getPosition(prevGhost))
new_x, new_y = self.to_screen(self.getPosition(ghost))
delta = new_x - old_x, new_y - old_y
for ghostImagePart in ghostImageParts:
move_by(ghostImagePart, delta)
refresh()
if ghost.scaredTimer > 0:
color = SCARED_COLOR
else:
color = GHOST_COLORS[ghostIndex]
edit(ghostImageParts[0], ('fill', color), ('outline', color))
self.moveEyes(self.getPosition(ghost), self.getDirection(ghost), ghostImageParts[-4:])
refresh()
def getPosition(self, agentState):
if agentState.configuration == None: return (-1000, -1000)
return agentState.getPosition()
def getDirection(self, agentState):
if agentState.configuration == None: return Directions.STOP
return agentState.configuration.getDirection()
def finish(self):
end_graphics()
def to_screen(self, point):
( x, y ) = point
#y = self.height - y
x = (x + 1)*self.gridSize
y = (self.height - y)*self.gridSize
return ( x, y )
# Fixes some TK issue with off-center circles
def to_screen2(self, point):
( x, y ) = point
#y = self.height - y
x = (x + 1)*self.gridSize
y = (self.height - y)*self.gridSize
return ( x, y )
def drawWalls(self, wallMatrix):
wallColor = WALL_COLOR
for xNum, x in enumerate(wallMatrix):
if self.capture and (xNum * 2) < wallMatrix.width: wallColor = TEAM_COLORS[0]
if self.capture and (xNum * 2) >= wallMatrix.width: wallColor = TEAM_COLORS[1]
for yNum, cell in enumerate(x):
if cell: # There's a wall here
pos = (xNum, yNum)
screen = self.to_screen(pos)
screen2 = self.to_screen2(pos)
# draw each quadrant of the square based on adjacent walls
wIsWall = self.isWall(xNum-1, yNum, wallMatrix)
eIsWall = self.isWall(xNum+1, yNum, wallMatrix)
nIsWall = self.isWall(xNum, yNum+1, wallMatrix)
sIsWall = self.isWall(xNum, yNum-1, wallMatrix)
nwIsWall = self.isWall(xNum-1, yNum+1, wallMatrix)
swIsWall = self.isWall(xNum-1, yNum-1, wallMatrix)
neIsWall = self.isWall(xNum+1, yNum+1, wallMatrix)
seIsWall = self.isWall(xNum+1, yNum-1, wallMatrix)
# NE quadrant
if (not nIsWall) and (not eIsWall):
# inner circle
circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (0,91), 'arc')
if (nIsWall) and (not eIsWall):
# vertical line
line(add(screen, (self.gridSize*WALL_RADIUS, 0)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(-0.5)-1)), wallColor)
if (not nIsWall) and (eIsWall):
# horizontal line
line(add(screen, (0, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5+1, self.gridSize*(-1)*WALL_RADIUS)), wallColor)
if (nIsWall) and (eIsWall) and (not neIsWall):
# outer circle
circle(add(screen2, (self.gridSize*2*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (180,271), 'arc')
line(add(screen, (self.gridSize*2*WALL_RADIUS-1, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5+1, self.gridSize*(-1)*WALL_RADIUS)), wallColor)
line(add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS+1)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(-0.5))), wallColor)
# NW quadrant
if (not nIsWall) and (not wIsWall):
# inner circle
circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (90,181), 'arc')
if (nIsWall) and (not wIsWall):
# vertical line
line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, 0)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(-0.5)-1)), wallColor)
if (not nIsWall) and (wIsWall):
# horizontal line
line(add(screen, (0, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5)-1, self.gridSize*(-1)*WALL_RADIUS)), wallColor)
if (nIsWall) and (wIsWall) and (not nwIsWall):
# outer circle
circle(add(screen2, (self.gridSize*(-2)*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (270,361), 'arc')
line(add(screen, (self.gridSize*(-2)*WALL_RADIUS+1, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5), self.gridSize*(-1)*WALL_RADIUS)), wallColor)
line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS+1)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(-0.5))), wallColor)
# SE quadrant
if (not sIsWall) and (not eIsWall):
# inner circle
circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (270,361), 'arc')
if (sIsWall) and (not eIsWall):
# vertical line
line(add(screen, (self.gridSize*WALL_RADIUS, 0)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(0.5)+1)), wallColor)
if (not sIsWall) and (eIsWall):
# horizontal line
line(add(screen, (0, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5+1, self.gridSize*(1)*WALL_RADIUS)), wallColor)
if (sIsWall) and (eIsWall) and (not seIsWall):
# outer circle
circle(add(screen2, (self.gridSize*2*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (90,181), 'arc')
line(add(screen, (self.gridSize*2*WALL_RADIUS-1, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5, self.gridSize*(1)*WALL_RADIUS)), wallColor)
line(add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS-1)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(0.5))), wallColor)
# SW quadrant
if (not sIsWall) and (not wIsWall):
# inner circle
circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (180,271), 'arc')
if (sIsWall) and (not wIsWall):
# vertical line
line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, 0)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(0.5)+1)), wallColor)
if (not sIsWall) and (wIsWall):
# horizontal line
line(add(screen, (0, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5)-1, self.gridSize*(1)*WALL_RADIUS)), wallColor)
if (sIsWall) and (wIsWall) and (not swIsWall):
# outer circle
circle(add(screen2, (self.gridSize*(-2)*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (0,91), 'arc')
line(add(screen, (self.gridSize*(-2)*WALL_RADIUS+1, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5), self.gridSize*(1)*WALL_RADIUS)), wallColor)
line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS-1)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(0.5))), wallColor)
def isWall(self, x, y, walls):
if x < 0 or y < 0:
return False
if x >= walls.width or y >= walls.height:
return False
return walls[x][y]
def drawFood(self, foodMatrix ):
foodImages = []
color = FOOD_COLOR
for xNum, x in enumerate(foodMatrix):
if self.capture and (xNum * 2) <= foodMatrix.width: color = TEAM_COLORS[0]
if self.capture and (xNum * 2) > foodMatrix.width: color = TEAM_COLORS[1]
imageRow = []
foodImages.append(imageRow)
for yNum, cell in enumerate(x):
if cell: # There's food here
screen = self.to_screen((xNum, yNum ))
dot = circle( screen,
FOOD_SIZE * self.gridSize,
outlineColor = color, fillColor = color,
width = 1)
imageRow.append(dot)
else:
imageRow.append(None)
return foodImages
def drawCapsules(self, capsules ):
capsuleImages = {}
for capsule in capsules:
( screen_x, screen_y ) = self.to_screen(capsule)
dot = circle( (screen_x, screen_y),
CAPSULE_SIZE * self.gridSize,
outlineColor = CAPSULE_COLOR,
fillColor = CAPSULE_COLOR,
width = 1)
capsuleImages[capsule] = dot
return capsuleImages
def removeFood(self, cell, foodImages ):
x, y = cell
remove_from_screen(foodImages[x][y])
def removeCapsule(self, cell, capsuleImages ):
x, y = cell
remove_from_screen(capsuleImages[(x, y)])
def drawExpandedCells(self, cells):
"""
Draws an overlay of expanded grid positions for search agents
"""
n = float(len(cells))
baseColor = [1.0, 0.0, 0.0]
self.clearExpandedCells()
self.expandedCells = []
for k, cell in enumerate(cells):
screenPos = self.to_screen( cell)
cellColor = formatColor(*[(n-k) * c * .5 / n + .25 for c in baseColor])
block = square(screenPos,
0.5 * self.gridSize,
color = cellColor,
filled = 1, behind=2)
self.expandedCells.append(block)
if self.frameTime < 0:
refresh()
def clearExpandedCells(self):
if 'expandedCells' in dir(self) and len(self.expandedCells) > 0:
for cell in self.expandedCells:
remove_from_screen(cell)
def updateDistributions(self, distributions):
"Draws an agent's belief distributions"
# copy all distributions so we don't change their state
distributions = map(lambda x: x.copy(), distributions)
if self.distributionImages == None:
self.drawDistributions(self.previousState)
for x in range(len(self.distributionImages)):
for y in range(len(self.distributionImages[0])):
image = self.distributionImages[x][y]
weights = [dist[ (x,y) ] for dist in distributions]
if sum(weights) != 0:
pass
# Fog of war
color = [0.0,0.0,0.0]
colors = GHOST_VEC_COLORS[1:] # With Pacman
if self.capture: colors = GHOST_VEC_COLORS
for weight, gcolor in zip(weights, colors):
color = [min(1.0, c + 0.95 * g * weight ** .3) for c,g in zip(color, gcolor)]
changeColor(image, formatColor(*color))
refresh()
class FirstPersonPacmanGraphics(PacmanGraphics):
def __init__(self, zoom = 1.0, showGhosts = True, capture = False, frameTime=0):
PacmanGraphics.__init__(self, zoom, frameTime=frameTime)
self.showGhosts = showGhosts
self.capture = capture
def initialize(self, state, isBlue = False):
self.isBlue = isBlue
PacmanGraphics.startGraphics(self, state)
# Initialize distribution images
walls = state.layout.walls
dist = []
self.layout = state.layout
# Draw the rest
self.distributionImages = None # initialize lazily
self.drawStaticObjects(state)
self.drawAgentObjects(state)
# Information
self.previousState = state
def lookAhead(self, config, state):
if config.getDirection() == 'Stop':
return
else:
pass
# Draw relevant ghosts
allGhosts = state.getGhostStates()
visibleGhosts = state.getVisibleGhosts()
for i, ghost in enumerate(allGhosts):
if ghost in visibleGhosts:
self.drawGhost(ghost, i)
else:
self.currentGhostImages[i] = None
def getGhostColor(self, ghost, ghostIndex):
return GHOST_COLORS[ghostIndex]
def getPosition(self, ghostState):
if not self.showGhosts and not ghostState.isPacman and ghostState.getPosition()[1] > 1:
return (-1000, -1000)
else:
return PacmanGraphics.getPosition(self, ghostState)
def add(x, y):
return (x[0] + y[0], x[1] + y[1])
# Saving graphical output
# -----------------------
# Note: to make an animated gif from this postscript output, try the command:
# convert -delay 7 -loop 1 -compress lzw -layers optimize frame* out.gif
# convert is part of imagemagick (freeware)
SAVE_POSTSCRIPT = False
POSTSCRIPT_OUTPUT_DIR = 'frames'
FRAME_NUMBER = 0
import os
def saveFrame():
"Saves the current graphical output as a postscript file"
global SAVE_POSTSCRIPT, FRAME_NUMBER, POSTSCRIPT_OUTPUT_DIR
if not SAVE_POSTSCRIPT: return
if not os.path.exists(POSTSCRIPT_OUTPUT_DIR): os.mkdir(POSTSCRIPT_OUTPUT_DIR)
name = os.path.join(POSTSCRIPT_OUTPUT_DIR, 'frame_%08d.ps' % FRAME_NUMBER)
FRAME_NUMBER += 1
writePostscript(name) # writes the current canvas
| bsd-3-clause |
indashnet/InDashNet.Open.UN2000 | android/external/chromium_org/third_party/mesa/src/src/mapi/glapi/gen/gl_x86_asm.py | 11 | 8663 | #!/usr/bin/env python
# (C) Copyright IBM Corporation 2004, 2005
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# on the rights to use, copy, modify, merge, publish, distribute, sub
# license, and/or sell copies of the Software, and to permit persons to whom
# the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
# IBM AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Authors:
# Ian Romanick <idr@us.ibm.com>
import license
import gl_XML, glX_XML
import sys, getopt
class PrintGenericStubs(gl_XML.gl_print_base):
def __init__(self):
gl_XML.gl_print_base.__init__(self)
self.name = "gl_x86_asm.py (from Mesa)"
self.license = license.bsd_license_template % ( \
"""Copyright (C) 1999-2001 Brian Paul All Rights Reserved.
(C) Copyright IBM Corporation 2004, 2005""", "BRIAN PAUL, IBM")
return
def get_stack_size(self, f):
size = 0
for p in f.parameterIterator():
if p.is_padding:
continue
size += p.get_stack_size()
return size
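	# For illustration (hypothetical values, not emitted verbatim by this
	# script): a GL entry point taking three 4-byte GLfloat parameters has a
	# stack size of 12, so the stdcall-decorated alternate name produced in
	# printBody below has the form name@12.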
def printRealHeader(self):
print '#include "x86/assyntax.h"'
print ''
print '#if defined(STDCALL_API)'
print '# if defined(USE_MGL_NAMESPACE)'
print '# define GL_PREFIX(n,n2) GLNAME(CONCAT(mgl,n2))'
print '# else'
print '# define GL_PREFIX(n,n2) GLNAME(CONCAT(gl,n2))'
print '# endif'
print '#else'
print '# if defined(USE_MGL_NAMESPACE)'
print '# define GL_PREFIX(n,n2) GLNAME(CONCAT(mgl,n))'
print '# define _glapi_Dispatch _mglapi_Dispatch'
print '# else'
print '# define GL_PREFIX(n,n2) GLNAME(CONCAT(gl,n))'
print '# endif'
print '#endif'
print ''
print '#define GL_OFFSET(x) CODEPTR(REGOFF(4 * x, EAX))'
print ''
print '#if defined(GNU_ASSEMBLER) && !defined(__DJGPP__) && !defined(__MINGW32__) && !defined(__APPLE__)'
print '#define GLOBL_FN(x) GLOBL x ; .type x, @function'
print '#else'
print '#define GLOBL_FN(x) GLOBL x'
print '#endif'
print ''
print '#if defined(HAVE_PTHREAD) || defined(WIN32)'
print '# define THREADS'
print '#endif'
print ''
print '#ifdef GLX_USE_TLS'
print ''
print '#ifdef GLX_X86_READONLY_TEXT'
print '# define CTX_INSNS MOV_L(GS:(EAX), EAX)'
print '#else'
print '# define CTX_INSNS NOP /* Pad for init_glapi_relocs() */'
print '#endif'
print ''
print '# define GL_STUB(fn,off,fn_alt)\t\t\t\\'
print 'ALIGNTEXT16;\t\t\t\t\t\t\\'
print 'GLOBL_FN(GL_PREFIX(fn, fn_alt));\t\t\t\\'
print 'GL_PREFIX(fn, fn_alt):\t\t\t\t\t\\'
print '\tCALL(_x86_get_dispatch) ;\t\t\t\\'
print '\tCTX_INSNS ; \\'
print '\tJMP(GL_OFFSET(off))'
print ''
print '#elif defined(HAVE_PTHREAD)'
print '# define GL_STUB(fn,off,fn_alt)\t\t\t\\'
print 'ALIGNTEXT16;\t\t\t\t\t\t\\'
print 'GLOBL_FN(GL_PREFIX(fn, fn_alt));\t\t\t\\'
print 'GL_PREFIX(fn, fn_alt):\t\t\t\t\t\\'
print '\tMOV_L(CONTENT(GLNAME(_glapi_Dispatch)), EAX) ;\t\\'
print '\tTEST_L(EAX, EAX) ;\t\t\t\t\\'
print '\tJE(1f) ;\t\t\t\t\t\\'
print '\tJMP(GL_OFFSET(off)) ;\t\t\t\t\\'
print '1:\tCALL(_x86_get_dispatch) ;\t\t\t\\'
print '\tJMP(GL_OFFSET(off))'
print '#elif defined(THREADS)'
print '# define GL_STUB(fn,off,fn_alt)\t\t\t\\'
print 'ALIGNTEXT16;\t\t\t\t\t\t\\'
print 'GLOBL_FN(GL_PREFIX(fn, fn_alt));\t\t\t\\'
print 'GL_PREFIX(fn, fn_alt):\t\t\t\t\t\\'
print '\tMOV_L(CONTENT(GLNAME(_glapi_Dispatch)), EAX) ;\t\\'
print '\tTEST_L(EAX, EAX) ;\t\t\t\t\\'
print '\tJE(1f) ;\t\t\t\t\t\\'
print '\tJMP(GL_OFFSET(off)) ;\t\t\t\t\\'
print '1:\tCALL(_glapi_get_dispatch) ;\t\t\t\\'
print '\tJMP(GL_OFFSET(off))'
print '#else /* Non-threaded version. */'
print '# define GL_STUB(fn,off,fn_alt)\t\t\t\\'
print 'ALIGNTEXT16;\t\t\t\t\t\t\\'
print 'GLOBL_FN(GL_PREFIX(fn, fn_alt));\t\t\t\\'
print 'GL_PREFIX(fn, fn_alt):\t\t\t\t\t\\'
print '\tMOV_L(CONTENT(GLNAME(_glapi_Dispatch)), EAX) ;\t\\'
print '\tJMP(GL_OFFSET(off))'
print '#endif'
print ''
print '#ifdef HAVE_ALIAS'
print '# define GL_STUB_ALIAS(fn,off,fn_alt,alias,alias_alt)\t\\'
print '\t.globl\tGL_PREFIX(fn, fn_alt) ;\t\t\t\\'
print '\t.set\tGL_PREFIX(fn, fn_alt), GL_PREFIX(alias, alias_alt)'
print '#else'
print '# define GL_STUB_ALIAS(fn,off,fn_alt,alias,alias_alt)\t\\'
print ' GL_STUB(fn, off, fn_alt)'
print '#endif'
print ''
print 'SEG_TEXT'
print ''
print '#ifdef GLX_USE_TLS'
print ''
print '\tGLOBL\tGLNAME(_x86_get_dispatch)'
print '\tHIDDEN(GLNAME(_x86_get_dispatch))'
print 'ALIGNTEXT16'
print 'GLNAME(_x86_get_dispatch):'
print '\tcall 1f'
print '1:\tpopl %eax'
print '\taddl $_GLOBAL_OFFSET_TABLE_+[.-1b], %eax'
print '\tmovl _glapi_tls_Dispatch@GOTNTPOFF(%eax), %eax'
print '\tret'
print ''
print '#elif defined(HAVE_PTHREAD)'
print 'EXTERN GLNAME(_glapi_Dispatch)'
print 'EXTERN GLNAME(_gl_DispatchTSD)'
print 'EXTERN GLNAME(pthread_getspecific)'
print ''
print 'ALIGNTEXT16'
print 'GLNAME(_x86_get_dispatch):'
print '\tSUB_L(CONST(24), ESP)'
print '\tPUSH_L(GLNAME(_gl_DispatchTSD))'
print '\tCALL(GLNAME(pthread_getspecific))'
print '\tADD_L(CONST(28), ESP)'
print '\tRET'
print '#elif defined(THREADS)'
print 'EXTERN GLNAME(_glapi_get_dispatch)'
print '#endif'
print ''
print '#if defined( GLX_USE_TLS ) && !defined( GLX_X86_READONLY_TEXT )'
print '\t\t.section\twtext, "awx", @progbits'
print '#endif /* defined( GLX_USE_TLS ) */'
print ''
print '\t\tALIGNTEXT16'
print '\t\tGLOBL GLNAME(gl_dispatch_functions_start)'
print '\t\tHIDDEN(GLNAME(gl_dispatch_functions_start))'
print 'GLNAME(gl_dispatch_functions_start):'
print ''
return
def printRealFooter(self):
print ''
print '\t\tGLOBL\tGLNAME(gl_dispatch_functions_end)'
print '\t\tHIDDEN(GLNAME(gl_dispatch_functions_end))'
print '\t\tALIGNTEXT16'
print 'GLNAME(gl_dispatch_functions_end):'
print ''
print '#if defined(GLX_USE_TLS) && defined(__linux__)'
print ' .section ".note.ABI-tag", "a"'
print ' .p2align 2'
print ' .long 1f - 0f /* name length */'
print ' .long 3f - 2f /* data length */'
print ' .long 1 /* note length */'
print '0: .asciz "GNU" /* vendor name */'
print '1: .p2align 2'
print '2: .long 0 /* note data: the ABI tag */'
print ' .long 2,4,20 /* Minimum kernel version w/TLS */'
print '3: .p2align 2 /* pad out section */'
print '#endif /* GLX_USE_TLS */'
print ''
print '#if defined (__ELF__) && defined (__linux__)'
print ' .section .note.GNU-stack,"",%progbits'
print '#endif'
return
def printBody(self, api):
for f in api.functionIterateByOffset():
name = f.dispatch_name()
stack = self.get_stack_size(f)
alt = "%s@%u" % (name, stack)
print '\tGL_STUB(%s, %d, %s)' % (name, f.offset, alt)
if not f.is_static_entry_point(f.name):
print '\tHIDDEN(GL_PREFIX(%s, %s))' % (name, alt)
for f in api.functionIterateByOffset():
name = f.dispatch_name()
stack = self.get_stack_size(f)
alt = "%s@%u" % (name, stack)
for n in f.entry_points:
if f.is_static_entry_point(n):
if n != f.name:
alt2 = "%s@%u" % (n, stack)
text = '\tGL_STUB_ALIAS(%s, %d, %s, %s, %s)' % (n, f.offset, alt2, name, alt)
if f.has_different_protocol(n):
print '#ifndef GLX_INDIRECT_RENDERING'
print text
print '#endif'
else:
print text
return
def show_usage():
print "Usage: %s [-f input_file_name] [-m output_mode]" % sys.argv[0]
sys.exit(1)
if __name__ == '__main__':
file_name = "gl_API.xml"
mode = "generic"
try:
(args, trail) = getopt.getopt(sys.argv[1:], "m:f:")
except Exception,e:
show_usage()
for (arg,val) in args:
if arg == '-m':
mode = val
elif arg == "-f":
file_name = val
if mode == "generic":
printer = PrintGenericStubs()
else:
print "ERROR: Invalid mode \"%s\" specified." % mode
show_usage()
api = gl_XML.parse_GL_API(file_name, glX_XML.glx_item_factory())
printer.Print(api)
| apache-2.0 |
rschnapka/odoo | addons/hr_timesheet_invoice/report/hr_timesheet_invoice_report.py | 52 | 9652 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
from openerp.tools.sql import drop_view_if_exists
class report_timesheet_line(osv.osv):
_name = "report.timesheet.line"
_description = "Timesheet Line"
_auto = False
_columns = {
'name': fields.char('Year',size=64,required=False, readonly=True),
'user_id': fields.many2one('res.users', 'User', readonly=True),
'date': fields.date('Date', readonly=True),
'day': fields.char('Day', size=128, readonly=True),
'quantity': fields.float('Time', readonly=True),
'cost': fields.float('Cost', readonly=True),
'product_id': fields.many2one('product.product', 'Product',readonly=True),
'account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
'general_account_id': fields.many2one('account.account', 'General Account', readonly=True),
'invoice_id': fields.many2one('account.invoice', 'Invoiced', readonly=True),
'month': fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month',readonly=True),
}
_order = 'name desc,user_id desc'
def init(self, cr):
drop_view_if_exists(cr, 'report_timesheet_line')
cr.execute("""
create or replace view report_timesheet_line as (
select
min(l.id) as id,
l.date as date,
to_char(l.date,'YYYY') as name,
to_char(l.date,'MM') as month,
l.user_id,
to_char(l.date, 'YYYY-MM-DD') as day,
l.invoice_id,
l.product_id,
l.account_id,
l.general_account_id,
sum(l.unit_amount) as quantity,
sum(l.amount) as cost
from
account_analytic_line l
where
l.user_id is not null
group by
l.date,
l.user_id,
l.product_id,
l.account_id,
l.general_account_id,
l.invoice_id
)
""")
report_timesheet_line()
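# Illustrative only (not part of the original module): the view created above
# can be queried directly in PostgreSQL, e.g. for per-user monthly totals:
#   select name as year, month, user_id, sum(quantity) as hours
#   from report_timesheet_line group by name, month, user_id;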
class report_timesheet_user(osv.osv):
_name = "report_timesheet.user"
_description = "Timesheet per day"
_auto = False
_columns = {
'name': fields.char('Year',size=64,required=False, readonly=True),
'user_id':fields.many2one('res.users', 'User', readonly=True),
'quantity': fields.float('Time', readonly=True),
'cost': fields.float('Cost', readonly=True),
'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month',readonly=True),
}
_order = 'name desc,user_id desc'
def init(self, cr):
drop_view_if_exists(cr, 'report_timesheet_user')
cr.execute("""
create or replace view report_timesheet_user as (
select
min(l.id) as id,
to_char(l.date,'YYYY') as name,
to_char(l.date,'MM') as month,
l.user_id,
sum(l.unit_amount) as quantity,
sum(l.amount) as cost
from
account_analytic_line l
where
user_id is not null
group by l.date, to_char(l.date,'YYYY'),to_char(l.date,'MM'), l.user_id
)
""")
report_timesheet_user()
class report_timesheet_account(osv.osv):
_name = "report_timesheet.account"
_description = "Timesheet per account"
_auto = False
_columns = {
'name': fields.char('Year',size=64,required=False, readonly=True),
'user_id':fields.many2one('res.users', 'User', readonly=True),
'account_id':fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
'quantity': fields.float('Time', readonly=True),
'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month',readonly=True),
}
_order = 'name desc,account_id desc,user_id desc'
def init(self, cr):
drop_view_if_exists(cr, 'report_timesheet_account')
cr.execute("""
create or replace view report_timesheet_account as (
select
min(id) as id,
to_char(create_date, 'YYYY') as name,
to_char(create_date,'MM') as month,
user_id,
account_id,
sum(unit_amount) as quantity
from
account_analytic_line
group by
to_char(create_date, 'YYYY'),to_char(create_date, 'MM'), user_id, account_id
)
""")
report_timesheet_account()
class report_timesheet_account_date(osv.osv):
_name = "report_timesheet.account.date"
_description = "Daily timesheet per account"
_auto = False
_columns = {
'name': fields.char('Year',size=64,required=False, readonly=True),
'user_id':fields.many2one('res.users', 'User', readonly=True),
'account_id':fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
'quantity': fields.float('Time', readonly=True),
'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month',readonly=True),
}
_order = 'name desc,account_id desc,user_id desc'
def init(self, cr):
drop_view_if_exists(cr, 'report_timesheet_account_date')
cr.execute("""
create or replace view report_timesheet_account_date as (
select
min(id) as id,
to_char(date,'YYYY') as name,
to_char(date,'MM') as month,
user_id,
account_id,
sum(unit_amount) as quantity
from
account_analytic_line
group by
to_char(date,'YYYY'),to_char(date,'MM'), user_id, account_id
)
""")
report_timesheet_account_date()
class report_timesheet_invoice(osv.osv):
_name = "report_timesheet.invoice"
_description = "Costs to invoice"
_auto = False
_columns = {
'user_id':fields.many2one('res.users', 'User', readonly=True),
'account_id':fields.many2one('account.analytic.account', 'Project', readonly=True),
'manager_id':fields.many2one('res.users', 'Manager', readonly=True),
'quantity': fields.float('Time', readonly=True),
'amount_invoice': fields.float('To invoice', readonly=True)
}
_rec_name = 'user_id'
_order = 'user_id desc'
def init(self, cr):
drop_view_if_exists(cr, 'report_timesheet_invoice')
cr.execute("""
create or replace view report_timesheet_invoice as (
select
min(l.id) as id,
l.user_id as user_id,
l.account_id as account_id,
a.user_id as manager_id,
sum(l.unit_amount) as quantity,
sum(l.unit_amount * t.list_price) as amount_invoice
from account_analytic_line l
left join hr_timesheet_invoice_factor f on (l.to_invoice=f.id)
left join account_analytic_account a on (l.account_id=a.id)
                    left join product_product p on (l.product_id=p.id)
                    left join product_template t on (p.product_tmpl_id=t.id)
where
l.to_invoice is not null and
l.invoice_id is null
group by
l.user_id,
l.account_id,
a.user_id
)
""")
report_timesheet_invoice()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
kmacinnis/sympy | sympy/assumptions/assume.py | 3 | 5514 | from __future__ import print_function, division
import inspect
from sympy.core.cache import cacheit
from sympy.core.singleton import S
from sympy.logic.boolalg import Boolean
from sympy.utilities.source import get_class
from contextlib import contextmanager
class AssumptionsContext(set):
"""Set representing assumptions.
This is used to represent global assumptions, but you can also use this
class to create your own local assumptions contexts. It is basically a thin
    wrapper around Python's set, so see its documentation for advanced usage.
Examples
========
>>> from sympy import AppliedPredicate, Q
>>> from sympy.assumptions.assume import global_assumptions
>>> global_assumptions
AssumptionsContext()
>>> from sympy.abc import x
>>> global_assumptions.add(Q.real(x))
>>> global_assumptions
AssumptionsContext([Q.real(x)])
>>> global_assumptions.remove(Q.real(x))
>>> global_assumptions
AssumptionsContext()
>>> global_assumptions.clear()
"""
def add(self, *assumptions):
"""Add an assumption."""
for a in assumptions:
super(AssumptionsContext, self).add(a)
global_assumptions = AssumptionsContext()
class AppliedPredicate(Boolean):
"""The class of expressions resulting from applying a Predicate.
>>> from sympy import Q, Symbol
>>> x = Symbol('x')
>>> Q.integer(x)
Q.integer(x)
>>> type(Q.integer(x))
<class 'sympy.assumptions.assume.AppliedPredicate'>
"""
__slots__ = []
def __new__(cls, predicate, arg):
return Boolean.__new__(cls, predicate, arg)
is_Atom = True # do not attempt to decompose this
@property
def arg(self):
"""
Return the expression used by this assumption.
Examples
========
>>> from sympy import Q, Symbol
>>> x = Symbol('x')
>>> a = Q.integer(x + 1)
>>> a.arg
x + 1
"""
return self._args[1]
@property
def args(self):
return self._args[1:]
@property
def func(self):
return self._args[0]
@cacheit
def sort_key(self, order=None):
return self.class_key(), (2, (self.func.name, self.arg.sort_key())), S.One.sort_key(), S.One
def __eq__(self, other):
if type(other) is AppliedPredicate:
return self._args == other._args
return False
def __hash__(self):
return super(AppliedPredicate, self).__hash__()
def _eval_ask(self, assumptions):
return self.func.eval(self.arg, assumptions)
class Predicate(Boolean):
"""A predicate is a function that returns a boolean value.
Predicates merely wrap their argument and remain unevaluated:
>>> from sympy import Q, ask, Symbol, S
>>> x = Symbol('x')
>>> Q.prime(7)
Q.prime(7)
To obtain the truth value of an expression containing predicates, use
the function `ask`:
>>> ask(Q.prime(7))
True
The tautological predicate `Q.is_true` can be used to wrap other objects:
>>> Q.is_true(x > 1)
Q.is_true(x > 1)
>>> Q.is_true(S(1) < x)
Q.is_true(1 < x)
"""
is_Atom = True
def __new__(cls, name, handlers=None):
obj = Boolean.__new__(cls)
obj.name = name
obj.handlers = handlers or []
return obj
def _hashable_content(self):
return (self.name,)
def __getnewargs__(self):
return (self.name,)
def __call__(self, expr):
return AppliedPredicate(self, expr)
def add_handler(self, handler):
self.handlers.append(handler)
def remove_handler(self, handler):
self.handlers.remove(handler)
@cacheit
def sort_key(self, order=None):
return self.class_key(), (1, (self.name,)), S.One.sort_key(), S.One
def eval(self, expr, assumptions=True):
"""
Evaluate self(expr) under the given assumptions.
This uses only direct resolution methods, not logical inference.
"""
res, _res = None, None
mro = inspect.getmro(type(expr))
for handler in self.handlers:
cls = get_class(handler)
for subclass in mro:
try:
eval = getattr(cls, subclass.__name__)
except AttributeError:
continue
res = eval(expr, assumptions)
if _res is None:
_res = res
elif res is None:
# since first resolutor was conclusive, we keep that value
res = _res
else:
# only check consistency if both resolutors have concluded
if _res != res:
raise ValueError('incompatible resolutors')
break
return res
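# A condensed standalone sketch (assumed helper name, not part of sympy) of
# the resolution order used by Predicate.eval above: each registered handler
# class is consulted once, trying the expression's classes from most to least
# specific, and conclusive answers from different handlers must agree.
def _eval_sketch(handler_classes, mro, expr, assumptions=True):
    res = None
    for cls in handler_classes:
        for subclass in mro:
            fn = getattr(cls, subclass.__name__, None)
            if fn is None:
                continue  # this handler says nothing about this class
            ans = fn(expr, assumptions)
            if ans is not None:
                if res is not None and res != ans:
                    raise ValueError('incompatible resolutors')
                res = ans
            break  # only the most specific matching class is consulted
    return res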
@contextmanager
def assuming(*assumptions):
""" Context manager for assumptions
>>> from sympy.assumptions import assuming, Q, ask
>>> from sympy.abc import x, y
>>> print(ask(Q.integer(x + y)))
None
>>> with assuming(Q.integer(x), Q.integer(y)):
... print(ask(Q.integer(x + y)))
True
"""
old_global_assumptions = global_assumptions.copy()
global_assumptions.update(assumptions)
try:
yield
finally:
global_assumptions.clear()
global_assumptions.update(old_global_assumptions)
| bsd-3-clause |
eirannejad/pyRevit | site-packages/sqlalchemy/pool.py | 32 | 49813 | # sqlalchemy/pool.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Connection pooling for DB-API connections.
Provides a number of connection pool implementations for a variety of
usage scenarios and thread behavior requirements imposed by the
application, DB-API or database itself.
Also provides a DB-API 2.0 connection proxying mechanism allowing
regular DB-API connect() methods to be transparently managed by a
SQLAlchemy connection pool.
"""
import time
import traceback
import weakref
from . import exc, log, event, interfaces, util
from .util import queue as sqla_queue
from .util import threading, memoized_property, \
chop_traceback
from collections import deque
proxies = {}
def manage(module, **params):
r"""Return a proxy for a DB-API module that automatically
pools connections.
Given a DB-API 2.0 module and pool management parameters, returns
a proxy for the module that will automatically pool connections,
creating new connection pools for each distinct set of connection
arguments sent to the decorated module's connect() function.
:param module: a DB-API 2.0 database module
:param poolclass: the class used by the pool module to provide
pooling. Defaults to :class:`.QueuePool`.
:param \**params: will be passed through to *poolclass*
"""
try:
return proxies[module]
except KeyError:
return proxies.setdefault(module, _DBProxy(module, **params))
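# A minimal usage sketch (assumes the stdlib sqlite3 DBAPI module; not part of
# the original file): proxying a module pools its connect() calls per distinct
# set of connect arguments, using QueuePool by default.
def _manage_example():
    import sqlite3
    pooled = manage(sqlite3, pool_size=5, max_overflow=10)
    conn = pooled.connect(':memory:')  # checked out from a QueuePool
    conn.close()                       # returned to the pool, not closed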
def clear_managers():
"""Remove all current DB-API 2.0 managers.
All pools and connections are disposed.
"""
for manager in proxies.values():
manager.close()
proxies.clear()
reset_rollback = util.symbol('reset_rollback')
reset_commit = util.symbol('reset_commit')
reset_none = util.symbol('reset_none')
class _ConnDialect(object):
"""partial implementation of :class:`.Dialect`
which provides DBAPI connection methods.
When a :class:`.Pool` is combined with an :class:`.Engine`,
the :class:`.Engine` replaces this with its own
:class:`.Dialect`.
"""
def do_rollback(self, dbapi_connection):
dbapi_connection.rollback()
def do_commit(self, dbapi_connection):
dbapi_connection.commit()
def do_close(self, dbapi_connection):
dbapi_connection.close()
class Pool(log.Identified):
"""Abstract base class for connection pools."""
_dialect = _ConnDialect()
def __init__(self,
creator, recycle=-1, echo=None,
use_threadlocal=False,
logging_name=None,
reset_on_return=True,
listeners=None,
events=None,
dialect=None,
_dispatch=None):
"""
Construct a Pool.
        :param creator: a callable function that returns a DB-API
          connection object.  The function is called with no arguments,
          unless it accepts a single positional argument, in which case
          the :class:`._ConnectionRecord` being initialized is passed in.
:param recycle: If set to non -1, number of seconds between
connection recycling, which means upon checkout, if this
timeout is surpassed the connection will be closed and
replaced with a newly opened connection. Defaults to -1.
:param logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.pool" logger. Defaults to a hexstring of the object's
id.
:param echo: If True, connections being pulled and retrieved
from the pool will be logged to the standard output, as well
as pool sizing information. Echoing can also be achieved by
enabling logging for the "sqlalchemy.pool"
namespace. Defaults to False.
:param use_threadlocal: If set to True, repeated calls to
:meth:`connect` within the same application thread will be
guaranteed to return the same connection object, if one has
already been retrieved from the pool and has not been
returned yet. Offers a slight performance advantage at the
cost of individual transactions by default. The
:meth:`.Pool.unique_connection` method is provided to return
a consistently unique connection to bypass this behavior
when the flag is set.
.. warning:: The :paramref:`.Pool.use_threadlocal` flag
**does not affect the behavior** of :meth:`.Engine.connect`.
:meth:`.Engine.connect` makes use of the
:meth:`.Pool.unique_connection` method which **does not use thread
local context**. To produce a :class:`.Connection` which refers
to the :meth:`.Pool.connect` method, use
:meth:`.Engine.contextual_connect`.
Note that other SQLAlchemy connectivity systems such as
:meth:`.Engine.execute` as well as the orm
:class:`.Session` make use of
:meth:`.Engine.contextual_connect` internally, so these functions
are compatible with the :paramref:`.Pool.use_threadlocal` setting.
.. seealso::
:ref:`threadlocal_strategy` - contains detail on the
"threadlocal" engine strategy, which provides a more comprehensive
approach to "threadlocal" connectivity for the specific
use case of using :class:`.Engine` and :class:`.Connection` objects
directly.
:param reset_on_return: Determine steps to take on
connections as they are returned to the pool.
reset_on_return can have any of these values:
* ``"rollback"`` - call rollback() on the connection,
to release locks and transaction resources.
This is the default value. The vast majority
of use cases should leave this value set.
* ``True`` - same as 'rollback', this is here for
backwards compatibility.
* ``"commit"`` - call commit() on the connection,
to release locks and transaction resources.
A commit here may be desirable for databases that
cache query plans if a commit is emitted,
such as Microsoft SQL Server. However, this
value is more dangerous than 'rollback' because
any data changes present on the transaction
are committed unconditionally.
* ``None`` - don't do anything on the connection.
This setting should only be made on a database
that has no transaction support at all,
namely MySQL MyISAM. By not doing anything,
performance can be improved. This
setting should **never be selected** for a
database that supports transactions,
as it will lead to deadlocks and stale
state.
* ``"none"`` - same as ``None``
.. versionadded:: 0.9.10
* ``False`` - same as None, this is here for
backwards compatibility.
.. versionchanged:: 0.7.6
:paramref:`.Pool.reset_on_return` accepts ``"rollback"``
and ``"commit"`` arguments.
:param events: a list of 2-tuples, each of the form
``(callable, target)`` which will be passed to :func:`.event.listen`
upon construction. Provided here so that event listeners
can be assigned via :func:`.create_engine` before dialect-level
listeners are applied.
:param listeners: Deprecated. A list of
:class:`~sqlalchemy.interfaces.PoolListener`-like objects or
dictionaries of callables that receive events when DB-API
connections are created, checked out and checked in to the
pool. This has been superseded by
:func:`~sqlalchemy.event.listen`.
:param dialect: a :class:`.Dialect` that will handle the job
of calling rollback(), close(), or commit() on DBAPI connections.
If omitted, a built-in "stub" dialect is used. Applications that
make use of :func:`~.create_engine` should not use this parameter
as it is handled by the engine creation strategy.
.. versionadded:: 1.1 - ``dialect`` is now a public parameter
to the :class:`.Pool`.
"""
if logging_name:
self.logging_name = self._orig_logging_name = logging_name
else:
self._orig_logging_name = None
log.instance_logger(self, echoflag=echo)
self._threadconns = threading.local()
self._creator = creator
self._recycle = recycle
self._invalidate_time = 0
self._use_threadlocal = use_threadlocal
if reset_on_return in ('rollback', True, reset_rollback):
self._reset_on_return = reset_rollback
elif reset_on_return in ('none', None, False, reset_none):
self._reset_on_return = reset_none
elif reset_on_return in ('commit', reset_commit):
self._reset_on_return = reset_commit
else:
raise exc.ArgumentError(
"Invalid value for 'reset_on_return': %r"
% reset_on_return)
self.echo = echo
if _dispatch:
self.dispatch._update(_dispatch, only_propagate=False)
if dialect:
self._dialect = dialect
if events:
for fn, target in events:
event.listen(self, target, fn)
if listeners:
util.warn_deprecated(
"The 'listeners' argument to Pool (and "
"create_engine()) is deprecated. Use event.listen().")
for l in listeners:
self.add_listener(l)
@property
def _creator(self):
return self.__dict__['_creator']
@_creator.setter
def _creator(self, creator):
self.__dict__['_creator'] = creator
self._invoke_creator = self._should_wrap_creator(creator)
def _should_wrap_creator(self, creator):
"""Detect if creator accepts a single argument, or is sent
as a legacy style no-arg function.
"""
try:
argspec = util.get_callable_argspec(self._creator, no_self=True)
except TypeError:
return lambda crec: creator()
defaulted = argspec[3] is not None and len(argspec[3]) or 0
positionals = len(argspec[0]) - defaulted
# look for the exact arg signature that DefaultStrategy
# sends us
if (argspec[0], argspec[3]) == (['connection_record'], (None,)):
return creator
# or just a single positional
elif positionals == 1:
return creator
# all other cases, just wrap and assume legacy "creator" callable
# thing
else:
return lambda crec: creator()
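    # For illustration: a creator given as ``lambda: dbapi.connect(url)``
    # takes no arguments and is wrapped as ``lambda crec: creator()``, while
    # one defined as ``def creator(connection_record): ...`` matches the
    # single-positional case and receives the :class:`._ConnectionRecord`.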
def _close_connection(self, connection):
self.logger.debug("Closing connection %r", connection)
try:
self._dialect.do_close(connection)
except Exception:
self.logger.error("Exception closing connection %r",
connection, exc_info=True)
@util.deprecated(
2.7, "Pool.add_listener is deprecated. Use event.listen()")
def add_listener(self, listener):
"""Add a :class:`.PoolListener`-like object to this pool.
``listener`` may be an object that implements some or all of
PoolListener, or a dictionary of callables containing implementations
of some or all of the named methods in PoolListener.
"""
interfaces.PoolListener._adapt_listener(self, listener)
def unique_connection(self):
"""Produce a DBAPI connection that is not referenced by any
thread-local context.
This method is equivalent to :meth:`.Pool.connect` when the
:paramref:`.Pool.use_threadlocal` flag is not set to True.
When :paramref:`.Pool.use_threadlocal` is True, the
:meth:`.Pool.unique_connection` method provides a means of bypassing
the threadlocal context.
"""
return _ConnectionFairy._checkout(self)
def _create_connection(self):
"""Called by subclasses to create a new ConnectionRecord."""
return _ConnectionRecord(self)
def _invalidate(self, connection, exception=None):
"""Mark all connections established within the generation
of the given connection as invalidated.
If this pool's last invalidate time is before when the given
connection was created, update the timestamp til now. Otherwise,
no action is performed.
Connections with a start time prior to this pool's invalidation
time will be recycled upon next checkout.
"""
rec = getattr(connection, "_connection_record", None)
if not rec or self._invalidate_time < rec.starttime:
self._invalidate_time = time.time()
if getattr(connection, 'is_valid', False):
connection.invalidate(exception)
def recreate(self):
"""Return a new :class:`.Pool`, of the same class as this one
and configured with identical creation arguments.
This method is used in conjunction with :meth:`dispose`
to close out an entire :class:`.Pool` and create a new one in
its place.
"""
raise NotImplementedError()
def dispose(self):
"""Dispose of this pool.
This method leaves the possibility of checked-out connections
remaining open, as it only affects connections that are
idle in the pool.
See also the :meth:`Pool.recreate` method.
"""
raise NotImplementedError()
def connect(self):
"""Return a DBAPI connection from the pool.
The connection is instrumented such that when its
``close()`` method is called, the connection will be returned to
the pool.
"""
if not self._use_threadlocal:
return _ConnectionFairy._checkout(self)
try:
rec = self._threadconns.current()
except AttributeError:
pass
else:
if rec is not None:
return rec._checkout_existing()
return _ConnectionFairy._checkout(self, self._threadconns)
def _return_conn(self, record):
"""Given a _ConnectionRecord, return it to the :class:`.Pool`.
This method is called when an instrumented DBAPI connection
has its ``close()`` method called.
"""
if self._use_threadlocal:
try:
del self._threadconns.current
except AttributeError:
pass
self._do_return_conn(record)
def _do_get(self):
"""Implementation for :meth:`get`, supplied by subclasses."""
raise NotImplementedError()
def _do_return_conn(self, conn):
"""Implementation for :meth:`return_conn`, supplied by subclasses."""
raise NotImplementedError()
def status(self):
raise NotImplementedError()
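# A small construction sketch (assumes the stdlib sqlite3 module; not part of
# the original file): Pool itself is abstract, so a concrete subclass such as
# QueuePool (defined below) is built with a no-argument creator callable.
def _pool_construction_example():
    import sqlite3
    pool = QueuePool(lambda: sqlite3.connect(':memory:'),
                     pool_size=3, max_overflow=2,
                     reset_on_return='rollback')  # rollback on check-in
    conn = pool.connect()      # DBAPI connection wrapped in a _ConnectionFairy
    conn.cursor().execute('select 1')
    conn.close()               # rolled back and returned to the pool
    return pool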
class _ConnectionRecord(object):
"""Internal object which maintains an individual DBAPI connection
referenced by a :class:`.Pool`.
The :class:`._ConnectionRecord` object always exists for any particular
DBAPI connection whether or not that DBAPI connection has been
"checked out". This is in contrast to the :class:`._ConnectionFairy`
which is only a public facade to the DBAPI connection while it is checked
out.
A :class:`._ConnectionRecord` may exist for a span longer than that
of a single DBAPI connection. For example, if the
:meth:`._ConnectionRecord.invalidate`
method is called, the DBAPI connection associated with this
:class:`._ConnectionRecord`
will be discarded, but the :class:`._ConnectionRecord` may be used again,
in which case a new DBAPI connection is produced when the :class:`.Pool`
next uses this record.
The :class:`._ConnectionRecord` is delivered along with connection
pool events, including :meth:`.PoolEvents.connect` and
:meth:`.PoolEvents.checkout`, however :class:`._ConnectionRecord` still
remains an internal object whose API and internals may change.
.. seealso::
:class:`._ConnectionFairy`
"""
def __init__(self, pool, connect=True):
self.__pool = pool
if connect:
self.__connect(first_connect_check=True)
self.finalize_callback = deque()
fairy_ref = None
starttime = None
connection = None
"""A reference to the actual DBAPI connection being tracked.
May be ``None`` if this :class:`._ConnectionRecord` has been marked
as invalidated; a new DBAPI connection may replace it if the owning
pool calls upon this :class:`._ConnectionRecord` to reconnect.
"""
_soft_invalidate_time = 0
@util.memoized_property
def info(self):
"""The ``.info`` dictionary associated with the DBAPI connection.
This dictionary is shared among the :attr:`._ConnectionFairy.info`
and :attr:`.Connection.info` accessors.
.. note::
The lifespan of this dictionary is linked to the
DBAPI connection itself, meaning that it is **discarded** each time
the DBAPI connection is closed and/or invalidated. The
:attr:`._ConnectionRecord.record_info` dictionary remains
persistent throughout the lifespan of the
:class:`._ConnectionRecord` container.
"""
return {}
@util.memoized_property
def record_info(self):
"""An "info' dictionary associated with the connection record
itself.
Unlike the :attr:`._ConnectionRecord.info` dictionary, which is linked
to the lifespan of the DBAPI connection, this dictionary is linked
to the lifespan of the :class:`._ConnectionRecord` container itself
        and will remain persistent throughout the life of the
:class:`._ConnectionRecord`.
.. versionadded:: 1.1
"""
return {}
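    # For illustration: rec.info is cleared whenever the DBAPI connection is
    # replaced (see get_connection below), while rec.record_info survives
    # invalidation and reconnects for as long as the record itself lives.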
@classmethod
def checkout(cls, pool):
rec = pool._do_get()
try:
dbapi_connection = rec.get_connection()
except:
with util.safe_reraise():
rec.checkin()
echo = pool._should_log_debug()
fairy = _ConnectionFairy(dbapi_connection, rec, echo)
rec.fairy_ref = weakref.ref(
fairy,
lambda ref: _finalize_fairy and
_finalize_fairy(
dbapi_connection,
rec, pool, ref, echo)
)
_refs.add(rec)
if echo:
pool.logger.debug("Connection %r checked out from pool",
dbapi_connection)
return fairy
def checkin(self):
self.fairy_ref = None
connection = self.connection
pool = self.__pool
while self.finalize_callback:
finalizer = self.finalize_callback.pop()
finalizer(connection)
if pool.dispatch.checkin:
pool.dispatch.checkin(connection, self)
pool._return_conn(self)
@property
def in_use(self):
return self.fairy_ref is not None
@property
def last_connect_time(self):
return self.starttime
def close(self):
if self.connection is not None:
self.__close()
def invalidate(self, e=None, soft=False):
"""Invalidate the DBAPI connection held by this :class:`._ConnectionRecord`.
This method is called for all connection invalidations, including
when the :meth:`._ConnectionFairy.invalidate` or
:meth:`.Connection.invalidate` methods are called, as well as when any
so-called "automatic invalidation" condition occurs.
:param e: an exception object indicating a reason for the invalidation.
:param soft: if True, the connection isn't closed; instead, this
connection will be recycled on next checkout.
.. versionadded:: 1.0.3
.. seealso::
:ref:`pool_connection_invalidation`
"""
# already invalidated
if self.connection is None:
return
if soft:
self.__pool.dispatch.soft_invalidate(self.connection, self, e)
else:
self.__pool.dispatch.invalidate(self.connection, self, e)
if e is not None:
self.__pool.logger.info(
"%sInvalidate connection %r (reason: %s:%s)",
"Soft " if soft else "",
self.connection, e.__class__.__name__, e)
else:
self.__pool.logger.info(
"%sInvalidate connection %r",
"Soft " if soft else "",
self.connection)
if soft:
self._soft_invalidate_time = time.time()
else:
self.__close()
self.connection = None
def get_connection(self):
recycle = False
if self.connection is None:
self.info.clear()
self.__connect()
elif self.__pool._recycle > -1 and \
time.time() - self.starttime > self.__pool._recycle:
self.__pool.logger.info(
"Connection %r exceeded timeout; recycling",
self.connection)
recycle = True
elif self.__pool._invalidate_time > self.starttime:
self.__pool.logger.info(
"Connection %r invalidated due to pool invalidation; " +
"recycling",
self.connection
)
recycle = True
elif self._soft_invalidate_time > self.starttime:
self.__pool.logger.info(
"Connection %r invalidated due to local soft invalidation; " +
"recycling",
self.connection
)
recycle = True
if recycle:
self.__close()
self.info.clear()
self.__connect()
return self.connection
def __close(self):
self.finalize_callback.clear()
if self.__pool.dispatch.close:
self.__pool.dispatch.close(self.connection, self)
self.__pool._close_connection(self.connection)
self.connection = None
def __connect(self, first_connect_check=False):
pool = self.__pool
# ensure any existing connection is removed, so that if
# creator fails, this attribute stays None
self.connection = None
try:
self.starttime = time.time()
connection = pool._invoke_creator(self)
pool.logger.debug("Created new connection %r", connection)
self.connection = connection
except Exception as e:
pool.logger.debug("Error on connect(): %s", e)
raise
else:
if first_connect_check:
pool.dispatch.first_connect.\
for_modify(pool.dispatch).\
exec_once(self.connection, self)
if pool.dispatch.connect:
pool.dispatch.connect(self.connection, self)
def _finalize_fairy(connection, connection_record,
pool, ref, echo, fairy=None):
"""Cleanup for a :class:`._ConnectionFairy` whether or not it's already
been garbage collected.
"""
_refs.discard(connection_record)
if ref is not None and \
connection_record.fairy_ref is not ref:
return
if connection is not None:
if connection_record and echo:
pool.logger.debug("Connection %r being returned to pool",
connection)
try:
fairy = fairy or _ConnectionFairy(
connection, connection_record, echo)
assert fairy.connection is connection
fairy._reset(pool)
# Immediately close detached instances
if not connection_record:
if pool.dispatch.close_detached:
pool.dispatch.close_detached(connection)
pool._close_connection(connection)
except BaseException as e:
pool.logger.error(
"Exception during reset or similar", exc_info=True)
if connection_record:
connection_record.invalidate(e=e)
if not isinstance(e, Exception):
raise
if connection_record:
connection_record.checkin()
_refs = set()
class _ConnectionFairy(object):
"""Proxies a DBAPI connection and provides return-on-dereference
support.
This is an internal object used by the :class:`.Pool` implementation
to provide context management to a DBAPI connection delivered by
that :class:`.Pool`.
The name "fairy" is inspired by the fact that the
:class:`._ConnectionFairy` object's lifespan is transitory, as it lasts
only for the length of a specific DBAPI connection being checked out from
the pool, and additionally that as a transparent proxy, it is mostly
invisible.
.. seealso::
:class:`._ConnectionRecord`
"""
def __init__(self, dbapi_connection, connection_record, echo):
self.connection = dbapi_connection
self._connection_record = connection_record
self._echo = echo
connection = None
"""A reference to the actual DBAPI connection being tracked."""
_connection_record = None
"""A reference to the :class:`._ConnectionRecord` object associated
with the DBAPI connection.
This is currently an internal accessor which is subject to change.
"""
_reset_agent = None
"""Refer to an object with a ``.commit()`` and ``.rollback()`` method;
if non-None, the "reset-on-return" feature will call upon this object
rather than directly against the dialect-level do_rollback() and
do_commit() methods.
In practice, a :class:`.Connection` assigns a :class:`.Transaction` object
to this variable when one is in scope so that the :class:`.Transaction`
takes the job of committing or rolling back on return if
:meth:`.Connection.close` is called while the :class:`.Transaction`
still exists.
This is essentially an "event handler" of sorts but is simplified as an
instance variable both for performance/simplicity as well as that there
can only be one "reset agent" at a time.
"""
@classmethod
def _checkout(cls, pool, threadconns=None, fairy=None):
if not fairy:
fairy = _ConnectionRecord.checkout(pool)
fairy._pool = pool
fairy._counter = 0
if threadconns is not None:
threadconns.current = weakref.ref(fairy)
if fairy.connection is None:
raise exc.InvalidRequestError("This connection is closed")
fairy._counter += 1
if not pool.dispatch.checkout or fairy._counter != 1:
return fairy
# Pool listeners can trigger a reconnection on checkout
attempts = 2
while attempts > 0:
try:
pool.dispatch.checkout(fairy.connection,
fairy._connection_record,
fairy)
return fairy
except exc.DisconnectionError as e:
pool.logger.info(
"Disconnection detected on checkout: %s", e)
fairy._connection_record.invalidate(e)
try:
fairy.connection = \
fairy._connection_record.get_connection()
except:
with util.safe_reraise():
fairy._connection_record.checkin()
attempts -= 1
pool.logger.info("Reconnection attempts exhausted on checkout")
fairy.invalidate()
raise exc.InvalidRequestError("This connection is closed")
def _checkout_existing(self):
return _ConnectionFairy._checkout(self._pool, fairy=self)
def _checkin(self):
_finalize_fairy(self.connection, self._connection_record,
self._pool, None, self._echo, fairy=self)
self.connection = None
self._connection_record = None
_close = _checkin
def _reset(self, pool):
if pool.dispatch.reset:
pool.dispatch.reset(self, self._connection_record)
if pool._reset_on_return is reset_rollback:
if self._echo:
pool.logger.debug("Connection %s rollback-on-return%s",
self.connection,
", via agent"
if self._reset_agent else "")
if self._reset_agent:
self._reset_agent.rollback()
else:
pool._dialect.do_rollback(self)
elif pool._reset_on_return is reset_commit:
if self._echo:
pool.logger.debug("Connection %s commit-on-return%s",
self.connection,
", via agent"
if self._reset_agent else "")
if self._reset_agent:
self._reset_agent.commit()
else:
pool._dialect.do_commit(self)
@property
def _logger(self):
return self._pool.logger
@property
def is_valid(self):
"""Return True if this :class:`._ConnectionFairy` still refers
to an active DBAPI connection."""
return self.connection is not None
@util.memoized_property
def info(self):
"""Info dictionary associated with the underlying DBAPI connection
referred to by this :class:`.ConnectionFairy`, allowing user-defined
data to be associated with the connection.
The data here will follow along with the DBAPI connection including
after it is returned to the connection pool and used again
in subsequent instances of :class:`._ConnectionFairy`. It is shared
with the :attr:`._ConnectionRecord.info` and :attr:`.Connection.info`
accessors.
The dictionary associated with a particular DBAPI connection is
discarded when the connection itself is discarded.
"""
return self._connection_record.info
@property
def record_info(self):
"""Info dictionary associated with the :class:`._ConnectionRecord
container referred to by this :class:`.ConnectionFairy`.
Unlike the :attr:`._ConnectionFairy.info` dictionary, the lifespan
of this dictionary is persistent across connections that are
disconnected and/or invalidated within the lifespan of a
:class:`._ConnectionRecord`.
.. versionadded:: 1.1
"""
if self._connection_record:
return self._connection_record.record_info
else:
return None
def invalidate(self, e=None, soft=False):
"""Mark this connection as invalidated.
This method can be called directly, and is also called as a result
of the :meth:`.Connection.invalidate` method. When invoked,
the DBAPI connection is immediately closed and discarded from
further use by the pool. The invalidation mechanism proceeds
via the :meth:`._ConnectionRecord.invalidate` internal method.
:param e: an exception object indicating a reason for the invalidation.
:param soft: if True, the connection isn't closed; instead, this
connection will be recycled on next checkout.
.. versionadded:: 1.0.3
.. seealso::
:ref:`pool_connection_invalidation`
"""
if self.connection is None:
util.warn("Can't invalidate an already-closed connection.")
return
if self._connection_record:
self._connection_record.invalidate(e=e, soft=soft)
if not soft:
self.connection = None
self._checkin()
def cursor(self, *args, **kwargs):
"""Return a new DBAPI cursor for the underlying connection.
This method is a proxy for the ``connection.cursor()`` DBAPI
method.
"""
return self.connection.cursor(*args, **kwargs)
def __getattr__(self, key):
return getattr(self.connection, key)
def detach(self):
"""Separate this connection from its Pool.
This means that the connection will no longer be returned to the
pool when closed, and will instead be literally closed. The
containing ConnectionRecord is separated from the DB-API connection,
and will create a new connection when next used.
Note that any overall connection limiting constraints imposed by a
Pool implementation may be violated after a detach, as the detached
connection is removed from the pool's knowledge and control.
"""
if self._connection_record is not None:
rec = self._connection_record
_refs.remove(rec)
rec.fairy_ref = None
rec.connection = None
# TODO: should this be _return_conn?
self._pool._do_return_conn(self._connection_record)
self.info = self.info.copy()
self._connection_record = None
if self._pool.dispatch.detach:
self._pool.dispatch.detach(self.connection, rec)
def close(self):
self._counter -= 1
if self._counter == 0:
self._checkin()
class SingletonThreadPool(Pool):
"""A Pool that maintains one connection per thread.
Maintains one connection per each thread, never moving a connection to a
thread other than the one which it was created in.
.. warning:: the :class:`.SingletonThreadPool` will call ``.close()``
on arbitrary connections that exist beyond the size setting of
``pool_size``, e.g. if more unique **thread identities**
than what ``pool_size`` states are used. This cleanup is
non-deterministic and not sensitive to whether or not the connections
linked to those thread identities are currently in use.
:class:`.SingletonThreadPool` may be improved in a future release,
however in its current status it is generally used only for test
scenarios using a SQLite ``:memory:`` database and is not recommended
for production use.
Options are the same as those of :class:`.Pool`, as well as:
:param pool_size: The number of threads in which to maintain connections
at once. Defaults to five.
:class:`.SingletonThreadPool` is used by the SQLite dialect
automatically when a memory-based database is used.
See :ref:`sqlite_toplevel`.
"""
def __init__(self, creator, pool_size=5, **kw):
kw['use_threadlocal'] = True
Pool.__init__(self, creator, **kw)
self._conn = threading.local()
self._all_conns = set()
self.size = pool_size
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(self._creator,
pool_size=self.size,
recycle=self._recycle,
echo=self.echo,
logging_name=self._orig_logging_name,
use_threadlocal=self._use_threadlocal,
reset_on_return=self._reset_on_return,
_dispatch=self.dispatch,
dialect=self._dialect)
def dispose(self):
"""Dispose of this pool."""
for conn in self._all_conns:
try:
conn.close()
except Exception:
# pysqlite won't even let you close a conn from a thread
# that didn't create it
pass
self._all_conns.clear()
def _cleanup(self):
while len(self._all_conns) >= self.size:
c = self._all_conns.pop()
c.close()
def status(self):
return "SingletonThreadPool id:%d size: %d" % \
(id(self), len(self._all_conns))
def _do_return_conn(self, conn):
pass
def _do_get(self):
try:
c = self._conn.current()
if c:
return c
except AttributeError:
pass
c = self._create_connection()
self._conn.current = weakref.ref(c)
if len(self._all_conns) >= self.size:
self._cleanup()
self._all_conns.add(c)
return c
class QueuePool(Pool):
"""A :class:`.Pool` that imposes a limit on the number of open connections.
:class:`.QueuePool` is the default pooling implementation used for
all :class:`.Engine` objects, unless the SQLite dialect is in use.
"""
def __init__(self, creator, pool_size=5, max_overflow=10, timeout=30,
**kw):
r"""
Construct a QueuePool.
:param creator: a callable function that returns a DB-API
connection object, same as that of :paramref:`.Pool.creator`.
:param pool_size: The size of the pool to be maintained,
defaults to 5. This is the largest number of connections that
will be kept persistently in the pool. Note that the pool
begins with no connections; once this number of connections
is requested, that number of connections will remain.
``pool_size`` can be set to 0 to indicate no size limit; to
disable pooling, use a :class:`~sqlalchemy.pool.NullPool`
instead.
:param max_overflow: The maximum overflow size of the
pool. When the number of checked-out connections reaches the
size set in pool_size, additional connections will be
returned up to this limit. When those additional connections
are returned to the pool, they are disconnected and
discarded. It follows then that the total number of
simultaneous connections the pool will allow is pool_size +
`max_overflow`, and the total number of "sleeping"
connections the pool will allow is pool_size. `max_overflow`
can be set to -1 to indicate no overflow limit; no limit
will be placed on the total number of concurrent
connections. Defaults to 10.
:param timeout: The number of seconds to wait before giving up
on returning a connection. Defaults to 30.
:param \**kw: Other keyword arguments including
:paramref:`.Pool.recycle`, :paramref:`.Pool.echo`,
:paramref:`.Pool.reset_on_return` and others are passed to the
:class:`.Pool` constructor.
"""
Pool.__init__(self, creator, **kw)
self._pool = sqla_queue.Queue(pool_size)
self._overflow = 0 - pool_size
self._max_overflow = max_overflow
self._timeout = timeout
self._overflow_lock = threading.Lock()
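    # For illustration: with pool_size=5 and max_overflow=10 the pool keeps
    # at most 5 idle connections and allows 5 + 10 = 15 checked out at once;
    # a 16th concurrent connect() blocks for up to `timeout` seconds and then
    # raises TimeoutError.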
def _do_return_conn(self, conn):
try:
self._pool.put(conn, False)
except sqla_queue.Full:
try:
conn.close()
finally:
self._dec_overflow()
def _do_get(self):
use_overflow = self._max_overflow > -1
try:
wait = use_overflow and self._overflow >= self._max_overflow
return self._pool.get(wait, self._timeout)
except sqla_queue.Empty:
if use_overflow and self._overflow >= self._max_overflow:
if not wait:
return self._do_get()
else:
raise exc.TimeoutError(
"QueuePool limit of size %d overflow %d reached, "
"connection timed out, timeout %d" %
(self.size(), self.overflow(), self._timeout))
if self._inc_overflow():
try:
return self._create_connection()
except:
with util.safe_reraise():
self._dec_overflow()
else:
return self._do_get()
def _inc_overflow(self):
if self._max_overflow == -1:
self._overflow += 1
return True
with self._overflow_lock:
if self._overflow < self._max_overflow:
self._overflow += 1
return True
else:
return False
def _dec_overflow(self):
if self._max_overflow == -1:
self._overflow -= 1
return True
with self._overflow_lock:
self._overflow -= 1
return True
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(self._creator, pool_size=self._pool.maxsize,
max_overflow=self._max_overflow,
timeout=self._timeout,
recycle=self._recycle, echo=self.echo,
logging_name=self._orig_logging_name,
use_threadlocal=self._use_threadlocal,
reset_on_return=self._reset_on_return,
_dispatch=self.dispatch,
dialect=self._dialect)
def dispose(self):
while True:
try:
conn = self._pool.get(False)
conn.close()
except sqla_queue.Empty:
break
self._overflow = 0 - self.size()
self.logger.info("Pool disposed. %s", self.status())
def status(self):
return "Pool size: %d Connections in pool: %d "\
"Current Overflow: %d Current Checked out "\
"connections: %d" % (self.size(),
self.checkedin(),
self.overflow(),
self.checkedout())
def size(self):
return self._pool.maxsize
def checkedin(self):
return self._pool.qsize()
def overflow(self):
return self._overflow
def checkedout(self):
return self._pool.maxsize - self._pool.qsize() + self._overflow
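# For illustration: _overflow starts at -pool_size and is incremented once per
# connection created, so it equals (connections created) - pool_size, and
# checkedout() above reduces to
#   pool_size - idle + (created - pool_size) = created - idle,
# i.e. the number of connections currently outside the pool.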
class NullPool(Pool):
"""A Pool which does not pool connections.
Instead it literally opens and closes the underlying DB-API connection
per each connection open/close.
Reconnect-related functions such as ``recycle`` and connection
invalidation are not supported by this Pool implementation, since
no connections are held persistently.
.. versionchanged:: 0.7
    :class:`.NullPool` is used by the SQLite dialect automatically
when a file-based database is used. See :ref:`sqlite_toplevel`.
"""
def status(self):
return "NullPool"
def _do_return_conn(self, conn):
conn.close()
def _do_get(self):
return self._create_connection()
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(self._creator,
recycle=self._recycle,
echo=self.echo,
logging_name=self._orig_logging_name,
use_threadlocal=self._use_threadlocal,
reset_on_return=self._reset_on_return,
_dispatch=self.dispatch,
dialect=self._dialect)
def dispose(self):
pass
class StaticPool(Pool):
"""A Pool of exactly one connection, used for all requests.
Reconnect-related functions such as ``recycle`` and connection
invalidation (which is also used to support auto-reconnect) are not
currently supported by this Pool implementation but may be implemented
in a future release.
"""
@memoized_property
def _conn(self):
return self._creator()
@memoized_property
def connection(self):
return _ConnectionRecord(self)
def status(self):
return "StaticPool"
def dispose(self):
if '_conn' in self.__dict__:
self._conn.close()
self._conn = None
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(creator=self._creator,
recycle=self._recycle,
use_threadlocal=self._use_threadlocal,
reset_on_return=self._reset_on_return,
echo=self.echo,
logging_name=self._orig_logging_name,
_dispatch=self.dispatch,
dialect=self._dialect)
def _create_connection(self):
return self._conn
def _do_return_conn(self, conn):
pass
def _do_get(self):
return self.connection
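# Editor's sketch (not part of the original module): StaticPool hands every
# checkout the same memoized DB-API connection, which is handy for sqlite
# ":memory:" databases. The creator and the `.connection` attribute access
# below are assumptions for illustration.
def _demo_static_pool():
    import sqlite3
    pool = StaticPool(creator=lambda: sqlite3.connect(":memory:"))
    c1 = pool.connect()
    c2 = pool.connect()
    assert c1.connection is c2.connection  # both wrap the single connection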
class AssertionPool(Pool):
"""A :class:`.Pool` that allows at most one checked out connection at
any given time.
This will raise an exception if more than one connection is checked out
at a time. Useful for debugging code that is using more connections
than desired.
.. versionchanged:: 0.7
:class:`.AssertionPool` also logs a traceback of where
the original connection was checked out, and reports
this in the assertion error raised.
"""
def __init__(self, *args, **kw):
self._conn = None
self._checked_out = False
self._store_traceback = kw.pop('store_traceback', True)
self._checkout_traceback = None
Pool.__init__(self, *args, **kw)
def status(self):
return "AssertionPool"
def _do_return_conn(self, conn):
if not self._checked_out:
raise AssertionError("connection is not checked out")
self._checked_out = False
assert conn is self._conn
def dispose(self):
self._checked_out = False
if self._conn:
self._conn.close()
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(self._creator, echo=self.echo,
logging_name=self._orig_logging_name,
_dispatch=self.dispatch,
dialect=self._dialect)
def _do_get(self):
if self._checked_out:
if self._checkout_traceback:
suffix = ' at:\n%s' % ''.join(
chop_traceback(self._checkout_traceback))
else:
suffix = ''
raise AssertionError("connection is already checked out" + suffix)
if not self._conn:
self._conn = self._create_connection()
self._checked_out = True
if self._store_traceback:
self._checkout_traceback = traceback.format_stack()
return self._conn
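# Editor's sketch (not part of the original module): AssertionPool raises on
# the second concurrent checkout, which makes connection leaks easy to spot
# in tests. The sqlite3 creator is an assumed placeholder.
def _demo_assertion_pool():
    import sqlite3
    pool = AssertionPool(lambda: sqlite3.connect(":memory:"))
    first = pool.connect()
    try:
        pool.connect()  # second checkout while `first` is still open
    except AssertionError:
        pass  # expected; the error reports where `first` was checked out
    first.close()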
class _DBProxy(object):
"""Layers connection pooling behavior on top of a standard DB-API module.
Proxies a DB-API 2.0 connect() call to a connection pool keyed to the
specific connect parameters. Other functions and attributes are delegated
to the underlying DB-API module.
"""
def __init__(self, module, poolclass=QueuePool, **kw):
"""Initializes a new proxy.
module
a DB-API 2.0 module
poolclass
a Pool class, defaulting to QueuePool
Other parameters are sent to the Pool object's constructor.
"""
self.module = module
self.kw = kw
self.poolclass = poolclass
self.pools = {}
self._create_pool_mutex = threading.Lock()
def close(self):
for key in list(self.pools):
del self.pools[key]
def __del__(self):
self.close()
def __getattr__(self, key):
return getattr(self.module, key)
def get_pool(self, *args, **kw):
key = self._serialize(*args, **kw)
try:
return self.pools[key]
except KeyError:
self._create_pool_mutex.acquire()
try:
if key not in self.pools:
kw.pop('sa_pool_key', None)
pool = self.poolclass(
lambda: self.module.connect(*args, **kw), **self.kw)
self.pools[key] = pool
return pool
else:
return self.pools[key]
finally:
self._create_pool_mutex.release()
def connect(self, *args, **kw):
"""Activate a connection to the database.
Connect to the database using this DBProxy's module and the given
connect arguments. If the arguments match an existing pool, the
connection will be returned from the pool's current thread-local
connection instance, or if there is no thread-local connection
instance it will be checked out from the set of pooled connections.
If the pool has no available connections and allows new connections
to be created, a new database connection will be made.
"""
return self.get_pool(*args, **kw).connect()
def dispose(self, *args, **kw):
"""Dispose the pool referenced by the given connect arguments."""
key = self._serialize(*args, **kw)
try:
del self.pools[key]
except KeyError:
pass
def _serialize(self, *args, **kw):
if "sa_pool_key" in kw:
return kw['sa_pool_key']
return tuple(
list(args) +
[(k, kw[k]) for k in sorted(kw)]
)
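# Editor's sketch (not part of the original module): _DBProxy keeps one pool
# per distinct set of connect() arguments, so repeated connects with the same
# arguments share a QueuePool. sqlite3 and "demo.db" are assumed stand-ins.
def _demo_db_proxy():
    import sqlite3
    proxy = _DBProxy(sqlite3, poolclass=QueuePool, pool_size=5)
    conn = proxy.connect("demo.db")  # a pooled sqlite3.connect("demo.db")
    conn.close()                     # checked back in, not really closed
    assert proxy.get_pool("demo.db") is proxy.get_pool("demo.db")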
| gpl-3.0 |
chenyyx/scikit-learn-doc-zh | examples/en/decomposition/plot_ica_vs_pca.py | 59 | 3329 | """
==========================
FastICA on 2D point clouds
==========================
This example gives a visual, feature-space comparison of the results of
two different component analysis techniques: :ref:`ICA` vs :ref:`PCA`.
Representing ICA in the feature space gives the view of 'geometric ICA':
ICA is an algorithm that finds directions in the feature space
corresponding to projections with high non-Gaussianity. These directions
need not be orthogonal in the original feature space, but they are
orthogonal in the whitened feature space, in which all directions
correspond to the same variance.
PCA, on the other hand, finds orthogonal directions in the raw feature
space that correspond to directions accounting for maximum variance.
Here we simulate independent sources using a highly non-Gaussian
process: two Student's t variables with a low number of degrees of freedom
(top left figure). We mix them to create observations (top right figure).
In this raw observation space, directions identified by PCA are
represented by orange vectors. We represent the signal in the PCA space,
after whitening by the variance corresponding to the PCA vectors (lower
left). Running ICA corresponds to finding a rotation in this space to
identify the directions of largest non-Gaussianity (lower right).
"""
print(__doc__)
# Authors: Alexandre Gramfort, Gael Varoquaux
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA, FastICA
# #############################################################################
# Generate sample data
rng = np.random.RandomState(42)
S = rng.standard_t(1.5, size=(20000, 2))
S[:, 0] *= 2.
# Mix data
A = np.array([[1, 1], [0, 2]]) # Mixing matrix
X = np.dot(S, A.T) # Generate observations
pca = PCA()
S_pca_ = pca.fit(X).transform(X)
ica = FastICA(random_state=rng)
S_ica_ = ica.fit(X).transform(X) # Estimate the sources
S_ica_ /= S_ica_.std(axis=0)
# #############################################################################
# Plot results
def plot_samples(S, axis_list=None):
plt.scatter(S[:, 0], S[:, 1], s=2, marker='o', zorder=10,
color='steelblue', alpha=0.5)
if axis_list is not None:
colors = ['orange', 'red']
for color, axis in zip(colors, axis_list):
axis /= axis.std()
x_axis, y_axis = axis
# Trick to get legend to work
plt.plot(0.1 * x_axis, 0.1 * y_axis, linewidth=2, color=color)
plt.quiver(0, 0, x_axis, y_axis, zorder=11, width=0.01, scale=6,
color=color)
plt.hlines(0, -3, 3)
plt.vlines(0, -3, 3)
plt.xlim(-3, 3)
plt.ylim(-3, 3)
plt.xlabel('x')
plt.ylabel('y')
plt.figure()
plt.subplot(2, 2, 1)
plot_samples(S / S.std())
plt.title('True Independent Sources')
axis_list = [pca.components_.T, ica.mixing_]
plt.subplot(2, 2, 2)
plot_samples(X / np.std(X), axis_list=axis_list)
legend = plt.legend(['PCA', 'ICA'], loc='upper right')
legend.set_zorder(100)
plt.title('Observations')
plt.subplot(2, 2, 3)
plot_samples(S_pca_ / np.std(S_pca_, axis=0))
plt.title('PCA recovered signals')
plt.subplot(2, 2, 4)
plot_samples(S_ica_ / np.std(S_ica_))
plt.title('ICA recovered signals')
plt.subplots_adjust(0.09, 0.04, 0.94, 0.94, 0.26, 0.36)
plt.show()
| gpl-3.0 |
evro/CouchPotatoServer | libs/suds/metrics.py | 211 | 2004 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The I{metrics} module defines classes and other resources
designed for collecting and reporting performance metrics.
"""
import time
from logging import getLogger
from suds import *
from math import modf
log = getLogger(__name__)
class Timer:
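    """Simple wall-clock timer; stringifies as ms, seconds or minutes."""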
def __init__(self):
self.started = 0
self.stopped = 0
def start(self):
self.started = time.time()
self.stopped = 0
return self
def stop(self):
if self.started > 0:
self.stopped = time.time()
return self
def duration(self):
return ( self.stopped - self.started )
def __str__(self):
if self.started == 0:
return 'not-running'
if self.started > 0 and self.stopped == 0:
return 'started: %d (running)' % self.started
duration = self.duration()
jmod = ( lambda m : (m[1], m[0]*1000) )
if duration < 1:
ms = (duration*1000)
return '%d (ms)' % ms
if duration < 60:
m = modf(duration)
return '%d.%.3d (seconds)' % jmod(m)
m = modf(duration/60)
return '%d.%.3d (minutes)' % jmod(m)
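# Editor's sketch (not part of the original module): typical Timer usage,
# wrapping an operation and logging the human-readable duration.
def _demo_timer():
    t = Timer().start()
    time.sleep(0.05)  # stand-in for the timed operation
    t.stop()
    log.debug('operation took %s', t)  # e.g. "50 (ms)"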
| gpl-3.0 |
jswope00/griffinx | common/lib/capa/capa/checker.py | 123 | 5899 | #!/usr/bin/env python
"""
Commandline tool for doing operations on Problems
"""
from __future__ import unicode_literals
import argparse
import logging
import sys
from path import path
from cStringIO import StringIO
from calc import UndefinedVariable
from capa.capa_problem import LoncapaProblem
from mako.lookup import TemplateLookup
logging.basicConfig(format="%(levelname)s %(message)s")
log = logging.getLogger('capa.checker')
class DemoSystem(object):
def __init__(self):
self.lookup = TemplateLookup(directories=[path(__file__).dirname() / 'templates'])
self.DEBUG = True
def render_template(self, template_filename, dictionary, context=None):
if context is None:
context = {}
context_dict = {}
context_dict.update(dictionary)
context_dict.update(context)
return self.lookup.get_template(template_filename).render(**context_dict)
def main():
parser = argparse.ArgumentParser(description='Check Problem Files')
parser.add_argument("command", choices=['test', 'show']) # Watch? Render? Open?
parser.add_argument("files", nargs="+", type=argparse.FileType('r'))
parser.add_argument("--seed", required=False, type=int)
parser.add_argument("--log-level", required=False, default="INFO",
choices=['info', 'debug', 'warn', 'error',
'INFO', 'DEBUG', 'WARN', 'ERROR'])
args = parser.parse_args()
log.setLevel(args.log_level.upper())
system = DemoSystem()
for problem_file in args.files:
log.info("Opening {0}".format(problem_file.name))
try:
problem = LoncapaProblem(problem_file, "fakeid", seed=args.seed, system=system)
except Exception as ex:
log.error("Could not parse file {0}".format(problem_file.name))
log.exception(ex)
continue
if args.command == 'test':
command_test(problem)
elif args.command == 'show':
command_show(problem)
problem_file.close()
# In case we want to do anything else here.
def command_show(problem):
"""Display the text for this problem"""
print problem.get_html()
def command_test(problem):
# We're going to trap stdout/stderr from the problems (yes, some print)
old_stdout, old_stderr = sys.stdout, sys.stderr
try:
sys.stdout = StringIO()
sys.stderr = StringIO()
check_that_suggested_answers_work(problem)
check_that_blanks_fail(problem)
log_captured_output(sys.stdout,
"captured stdout from {0}".format(problem))
log_captured_output(sys.stderr,
"captured stderr from {0}".format(problem))
except Exception as e:
log.exception(e)
finally:
sys.stdout, sys.stderr = old_stdout, old_stderr
def check_that_blanks_fail(problem):
"""Leaving it blank should never work. Neither should a space."""
blank_answers = dict((answer_id, u"")
for answer_id in problem.get_question_answers())
grading_results = problem.grade_answers(blank_answers)
try:
assert(all(result == 'incorrect' for result in grading_results.values()))
except AssertionError:
log.error("Blank accepted as correct answer in {0} for {1}"
.format(problem,
[answer_id for answer_id, result
in sorted(grading_results.items())
if result != 'incorrect']))
def check_that_suggested_answers_work(problem):
"""Split this up so that we're only used for formula/numeric answers.
Examples of where this fails:
* Displayed answers use units but acceptable ones do not.
- L1e0.xml
- Presents itself as UndefinedVariable (when it tries to pass to calc)
* "a or d" is what's displayed, but only "a" or "d" is accepted, not the
string "a or d".
- L1-e00.xml
"""
# These are actual answers we get from the responsetypes
real_answers = problem.get_question_answers()
# all_answers is real_answers + blanks for other answer_ids for which the
# responsetypes can't provide us pre-canned answers (customresponse)
all_answer_ids = problem.get_answer_ids()
all_answers = dict((answer_id, real_answers.get(answer_id, ""))
for answer_id in all_answer_ids)
log.debug("Real answers: {0}".format(real_answers))
if real_answers:
try:
real_results = dict((answer_id, result) for answer_id, result
in problem.grade_answers(all_answers).items()
if answer_id in real_answers)
log.debug(real_results)
assert(all(result == 'correct'
for answer_id, result in real_results.items()))
except UndefinedVariable as uv_exc:
log.error("The variable \"{0}\" specified in the ".format(uv_exc) +
"solution isn't recognized (is it a units measure?).")
except AssertionError:
log.error("The following generated answers were not accepted for {0}:"
.format(problem))
for question_id, result in sorted(real_results.items()):
if result != 'correct':
log.error(" {0} = {1}".format(question_id, real_answers[question_id]))
except Exception as ex:
log.error("Uncaught error in {0}".format(problem))
log.exception(ex)
def log_captured_output(output_stream, stream_name):
output_stream.seek(0)
output_text = output_stream.read()
if output_text:
log.info("##### Begin {0} #####\n".format(stream_name) + output_text)
log.info("##### End {0} #####".format(stream_name))
if __name__ == '__main__':
sys.exit(main())
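# Editor's note (hedged usage sketch; file names are placeholders):
#   python checker.py show problem1.xml
#   python checker.py test problem1.xml problem2.xml --seed 42 --log-level debug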
| agpl-3.0 |
hachard/Cra-Magnet | flask/lib/python3.5/site-packages/defusedxml/ElementTree.py | 53 | 4031 | # defusedxml
#
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
"""Defused xml.etree.ElementTree facade
"""
from __future__ import print_function, absolute_import
import sys
from .common import PY3, PY26, PY31
if PY3:
import importlib
else:
from xml.etree.ElementTree import XMLParser as _XMLParser
from xml.etree.ElementTree import iterparse as _iterparse
if PY26:
from xml.parsers.expat import ExpatError as ParseError
else:
from xml.etree.ElementTree import ParseError
_IterParseIterator = None
from xml.etree.ElementTree import TreeBuilder as _TreeBuilder
from xml.etree.ElementTree import parse as _parse
from xml.etree.ElementTree import tostring
from .common import (DTDForbidden, EntitiesForbidden,
ExternalReferenceForbidden, _generate_etree_functions)
__origin__ = "xml.etree.ElementTree"
def _get_py3_cls():
"""Python 3.3 hides the pure Python code but defusedxml requires it.
The code is based on test.support.import_fresh_module().
"""
pymodname = "xml.etree.ElementTree"
cmodname = "_elementtree"
pymod = sys.modules.pop(pymodname, None)
cmod = sys.modules.pop(cmodname, None)
sys.modules[cmodname] = None
pure_pymod = importlib.import_module(pymodname)
if cmod is not None:
sys.modules[cmodname] = cmod
else:
sys.modules.pop(cmodname)
sys.modules[pymodname] = pymod
_XMLParser = pure_pymod.XMLParser
_iterparse = pure_pymod.iterparse
if PY31:
_IterParseIterator = None
from xml.parsers.expat import ExpatError as ParseError
else:
_IterParseIterator = pure_pymod._IterParseIterator
ParseError = pure_pymod.ParseError
return _XMLParser, _iterparse, _IterParseIterator, ParseError
if PY3:
_XMLParser, _iterparse, _IterParseIterator, ParseError = _get_py3_cls()
class DefusedXMLParser(_XMLParser):
def __init__(self, html=0, target=None, encoding=None,
forbid_dtd=False, forbid_entities=True,
forbid_external=True):
if PY26 or PY31:
_XMLParser.__init__(self, html, target)
else:
# Python 2.x old style class
_XMLParser.__init__(self, html, target, encoding)
self.forbid_dtd = forbid_dtd
self.forbid_entities = forbid_entities
self.forbid_external = forbid_external
if PY3 and not PY31:
parser = self.parser
else:
parser = self._parser
if self.forbid_dtd:
parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
if self.forbid_entities:
parser.EntityDeclHandler = self.defused_entity_decl
parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
if self.forbid_external:
parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
def defused_start_doctype_decl(self, name, sysid, pubid,
has_internal_subset):
raise DTDForbidden(name, sysid, pubid)
def defused_entity_decl(self, name, is_parameter_entity, value, base,
sysid, pubid, notation_name):
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
def defused_unparsed_entity_decl(self, name, base, sysid, pubid,
notation_name):
# expat 1.2
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
def defused_external_entity_ref_handler(self, context, base, sysid,
pubid):
raise ExternalReferenceForbidden(context, base, sysid, pubid)
# aliases
XMLTreeBuilder = XMLParse = DefusedXMLParser
parse, iterparse, fromstring = _generate_etree_functions(DefusedXMLParser,
_TreeBuilder, _IterParseIterator, _parse, _iterparse)
XML = fromstring
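# Editor's sketch (not part of the original module): with the default
# forbid_entities=True, parsing a document that declares entities (e.g. a
# "billion laughs" payload) raises EntitiesForbidden instead of expanding.
def _demo_reject_entities():
    payload = '<!DOCTYPE x [<!ENTITY a "aaaa">]><x>&a;</x>'
    try:
        fromstring(payload)
    except EntitiesForbidden:
        pass  # expected: inline entity declarations are forbidden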
| gpl-3.0 |
Azulinho/ansible | lib/ansible/modules/web_infrastructure/supervisorctl.py | 49 | 8666 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Matt Wright <matt@nobien.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: supervisorctl
short_description: Manage the state of a program or group of programs running via supervisord
description:
- Manage the state of a program or group of programs running via supervisord
version_added: "0.7"
options:
name:
description:
- The name of the supervisord program or group to manage.
- The name will be taken as group name when it ends with a colon I(:)
- Group support is only available in Ansible version 1.6 or later.
required: true
default: null
config:
description:
- The supervisor configuration file path
required: false
default: null
version_added: "1.3"
server_url:
description:
- URL on which supervisord server is listening
required: false
default: null
version_added: "1.3"
username:
description:
- username to use for authentication
required: false
default: null
version_added: "1.3"
password:
description:
- password to use for authentication
required: false
default: null
version_added: "1.3"
state:
description:
- The desired state of program/group.
required: true
default: null
choices: [ "present", "started", "stopped", "restarted", "absent" ]
supervisorctl_path:
description:
- path to supervisorctl executable
required: false
default: null
version_added: "1.4"
notes:
- When C(state) = I(present), the module will call C(supervisorctl reread) then C(supervisorctl add) if the program/group does not exist.
- When C(state) = I(restarted), the module will call C(supervisorctl update) then call C(supervisorctl restart).
requirements: [ "supervisorctl" ]
author:
- "Matt Wright (@mattupstate)"
- "Aaron Wang (@inetfuture) <inetfuture@gmail.com>"
'''
EXAMPLES = '''
# Manage the state of program to be in 'started' state.
- supervisorctl:
name: my_app
state: started
# Manage the state of program group to be in 'started' state.
- supervisorctl:
name: 'my_apps:'
state: started
# Restart my_app, reading supervisorctl configuration from a specified file.
- supervisorctl:
name: my_app
state: restarted
config: /var/opt/my_project/supervisord.conf
# Restart my_app, connecting to supervisord with credentials and server URL.
- supervisorctl:
name: my_app
state: restarted
username: test
password: testpass
server_url: http://localhost:9001
'''
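# Editor's note (hedged example, not in the original EXAMPLES block): per the
# notes above, state=present only registers the program via "supervisorctl
# reread" and "supervisorctl add", without starting it.
#
# - supervisorctl:
#     name: my_app
#     state: present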
import os
from ansible.module_utils.basic import AnsibleModule, is_executable
def main():
arg_spec = dict(
name=dict(required=True),
config=dict(required=False, type='path'),
server_url=dict(required=False),
username=dict(required=False),
password=dict(required=False, no_log=True),
supervisorctl_path=dict(required=False, type='path'),
state=dict(required=True, choices=['present', 'started', 'restarted', 'stopped', 'absent'])
)
module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
name = module.params['name']
is_group = False
if name.endswith(':'):
is_group = True
name = name.rstrip(':')
state = module.params['state']
config = module.params.get('config')
server_url = module.params.get('server_url')
username = module.params.get('username')
password = module.params.get('password')
supervisorctl_path = module.params.get('supervisorctl_path')
# we check error message for a pattern, so we need to make sure that's in C locale
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
if supervisorctl_path:
if os.path.exists(supervisorctl_path) and is_executable(supervisorctl_path):
supervisorctl_args = [supervisorctl_path]
else:
module.fail_json(
msg="Provided path to supervisorctl does not exist or isn't executable: %s" % supervisorctl_path)
else:
supervisorctl_args = [module.get_bin_path('supervisorctl', True)]
if config:
supervisorctl_args.extend(['-c', config])
if server_url:
supervisorctl_args.extend(['-s', server_url])
if username:
supervisorctl_args.extend(['-u', username])
if password:
supervisorctl_args.extend(['-p', password])
def run_supervisorctl(cmd, name=None, **kwargs):
args = list(supervisorctl_args) # copy the master args
args.append(cmd)
if name:
args.append(name)
return module.run_command(args, **kwargs)
def get_matched_processes():
matched = []
rc, out, err = run_supervisorctl('status')
for line in out.splitlines():
# One status line may look like one of these two:
# process not in group:
# echo_date_lonely RUNNING pid 7680, uptime 13:22:18
# process in group:
# echo_date_group:echo_date_00 RUNNING pid 7681, uptime 13:22:18
fields = [field for field in line.split(' ') if field != '']
process_name = fields[0]
status = fields[1]
if is_group:
# If there is ':', this process must be in a group.
if ':' in process_name:
group = process_name.split(':')[0]
if group != name:
continue
else:
continue
else:
if process_name != name:
continue
matched.append((process_name, status))
return matched
def take_action_on_processes(processes, status_filter, action, expected_result):
to_take_action_on = []
for process_name, status in processes:
if status_filter(status):
to_take_action_on.append(process_name)
if len(to_take_action_on) == 0:
module.exit_json(changed=False, name=name, state=state)
if module.check_mode:
module.exit_json(changed=True)
for process_name in to_take_action_on:
rc, out, err = run_supervisorctl(action, process_name, check_rc=True)
if '%s: %s' % (process_name, expected_result) not in out:
module.fail_json(msg=out)
module.exit_json(changed=True, name=name, state=state, affected=to_take_action_on)
if state == 'restarted':
rc, out, err = run_supervisorctl('update', check_rc=True)
processes = get_matched_processes()
if len(processes) == 0:
module.fail_json(name=name, msg="ERROR (no such process)")
take_action_on_processes(processes, lambda s: True, 'restart', 'started')
processes = get_matched_processes()
if state == 'absent':
if len(processes) == 0:
module.exit_json(changed=False, name=name, state=state)
if module.check_mode:
module.exit_json(changed=True)
run_supervisorctl('reread', check_rc=True)
rc, out, err = run_supervisorctl('remove', name)
if '%s: removed process group' % name in out:
module.exit_json(changed=True, name=name, state=state)
else:
module.fail_json(msg=out, name=name, state=state)
if state == 'present':
if len(processes) > 0:
module.exit_json(changed=False, name=name, state=state)
if module.check_mode:
module.exit_json(changed=True)
run_supervisorctl('reread', check_rc=True)
rc, out, err = run_supervisorctl('add', name)
if '%s: added process group' % name in out:
module.exit_json(changed=True, name=name, state=state)
else:
module.fail_json(msg=out, name=name, state=state)
if state == 'started':
if len(processes) == 0:
module.fail_json(name=name, msg="ERROR (no such process)")
take_action_on_processes(processes, lambda s: s not in ('RUNNING', 'STARTING'), 'start', 'started')
if state == 'stopped':
if len(processes) == 0:
module.fail_json(name=name, msg="ERROR (no such process)")
take_action_on_processes(processes, lambda s: s in ('RUNNING', 'STARTING'), 'stop', 'stopped')
if __name__ == '__main__':
main()
| gpl-3.0 |
biocore/qiime | tests/test_split_libraries_lea_seq.py | 15 | 24312 | #!/usr/bin/env python
from __future__ import division
__author__ = "Charudatta Navare"
__copyright__ = "Copyright 2014, The QIIME Project"
__credits__ = ["Charudatta Navare"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Charudatta Navare"
__email__ = "charudatta.navare@gmail.com"
from tempfile import NamedTemporaryFile
from unittest import TestCase, main
from skbio.util import remove_files
from qiime.util import get_qiime_temp_dir
from qiime.split_libraries_lea_seq import (get_cluster_ratio, get_consensus,
get_LEA_seq_consensus_seqs,
select_unique_rand_bcs,
extract_primer,
format_lea_seq_log,
process_mapping_file,
check_barcodes,
get_consensus_seqs_lookup,
read_fwd_rev_read,
InvalidGolayBarcodeError,
BarcodeLenMismatchError,
SeqLengthMismatchError,
LowConsensusScoreError,
PrimerMismatchError)
class WorkflowTests(TestCase):
def setUp(self):
"""setup the test values"""
# define test data
self.temp_dir = get_qiime_temp_dir()
self.fasta_seqs_of_rand_bcs = fasta_seqs_of_rand_bcs
self.fasta_seqs_for_cluster_ratio = fasta_seqs_for_cluster_ratio
self.fasta_seqs_for_consensus = fasta_seqs_for_consensus
self.fwd_read_data = fwd_read_data
self.rev_read_data = rev_read_data
self.get_cons_fwd_read_data = get_cons_fwd_read_data
self.get_cons_rev_read_data = get_cons_rev_read_data
self.fwd_read_fh = NamedTemporaryFile(
delete=False,
mode='w',
dir=self.temp_dir)
self.fwd_read_fh_name = self.fwd_read_fh.name
self.fwd_read_fh.write(self.fwd_read_data)
self.fwd_read_fh.close()
self.fwd_read_fh = open(self.fwd_read_fh_name, 'r')
self.rev_read_fh = NamedTemporaryFile(
delete=False,
mode='w',
dir=self.temp_dir)
self.rev_read_fh_name = self.rev_read_fh.name
self.rev_read_fh.write(self.rev_read_data)
self.rev_read_fh.close()
self.rev_read_fh = open(self.rev_read_fh_name, 'r')
self.get_cons_fwd_read_fh = NamedTemporaryFile(
delete=False,
mode='w',
dir=self.temp_dir)
self.get_cons_fwd_read_fh_name = self.get_cons_fwd_read_fh.name
self.get_cons_fwd_read_fh.write(self.get_cons_fwd_read_data)
self.get_cons_fwd_read_fh.close()
self.get_cons_fwd_read_fh = open(self.get_cons_fwd_read_fh_name, 'r')
self.get_cons_rev_read_fh = NamedTemporaryFile(
delete=False,
mode='w',
dir=self.temp_dir)
self.get_cons_rev_read_fh_name = self.get_cons_rev_read_fh.name
self.get_cons_rev_read_fh.write(self.get_cons_rev_read_data)
self.get_cons_rev_read_fh.close()
self.get_cons_rev_read_fh = open(self.get_cons_rev_read_fh_name, 'r')
self.mapping_data = mapping_data
self.get_cons_mapping_data = get_cons_mapping_data
self.fasta_seq_for_primer = fasta_seq_for_primer
self.possible_primers = possible_primers
self.fasta_seqs_for_consensus_tie_G_C = \
fasta_seqs_for_consensus_tie_G_C
self.fasta_seqs_for_consensus_unequal_length = \
fasta_seqs_for_consensus_unequal_length
self.seqs_with_no_consensus = seqs_with_no_consensus
self.fasta_file_for_consensus_tie_G_C = NamedTemporaryFile(
delete=False,
mode='w',
dir=self.temp_dir)
self.fasta_file_for_consensus_tie_G_C_name = \
self.fasta_file_for_consensus_tie_G_C.name
self.fasta_file_for_consensus_tie_G_C.write(
self.fasta_seqs_for_consensus_tie_G_C)
self.fasta_file_for_consensus_tie_G_C.close()
self.fasta_file_for_consensus_tie_G_C = open(
self.fasta_file_for_consensus_tie_G_C_name, 'r')
self.fasta_file_for_consensus_unequal_length = NamedTemporaryFile(
delete=False,
mode='w',
dir=self.temp_dir)
self.fasta_file_for_consensus_unequal_length_name = \
self.fasta_file_for_consensus_unequal_length.name
self.fasta_file_for_consensus_unequal_length.write(
self.fasta_seqs_for_consensus_unequal_length)
self.fasta_file_for_consensus_unequal_length.close()
self.fasta_file_for_consensus_unequal_length = open(
self.fasta_file_for_consensus_unequal_length_name, 'r')
self.fasta_file_no_consensus = NamedTemporaryFile(
delete=False,
mode='w',
dir=self.temp_dir)
self.fasta_file_no_consensus_name = self.fasta_file_no_consensus.name
self.fasta_file_no_consensus.write(self.seqs_with_no_consensus)
self.fasta_file_no_consensus.close()
self.fasta_file_no_consensus = open(
self.fasta_file_no_consensus_name, 'r')
self.min_difference_in_clusters = min_difference_in_clusters
self.mapping_fp = NamedTemporaryFile(
delete=False,
mode='w',
dir=self.temp_dir)
self.mapping_fp.write(self.mapping_data)
self.mapping_fp_name = self.mapping_fp.name
self.mapping_fp.close()
self.mapping_fp = open(self.mapping_fp_name, 'r')
self.get_cons_mapping_fp = NamedTemporaryFile(
delete=False,
mode='w',
dir=self.temp_dir)
self.get_cons_mapping_fp.write(self.get_cons_mapping_data)
self.get_cons_mapping_fp_name = self.get_cons_mapping_fp.name
self.get_cons_mapping_fp.close()
self.get_cons_mapping_fp = open(self.get_cons_mapping_fp_name, 'r')
self.false_primers = false_primers
self.barcode_len = barcode_len
self.barcode_correction_fn = barcode_correction_fn
self.max_barcode_errors = max_barcode_errors
self.fwd_length = fwd_length
        self.rev_length = rev_length  # was fwd_length; both are 19, but rev_length is the intended name
self.bc_to_sid = bc_to_sid
self.bc_to_fwd_primers = bc_to_fwd_primers
self.bc_to_rev_primers = bc_to_rev_primers
self.min_difference_in_bcs = min_difference_in_bcs
self.min_reads_per_random_bc = min_reads_per_random_bc
self.max_cluster_ratio = max_cluster_ratio
def tearDown(self):
"""remove all the files after completing tests """
self.mapping_fp.close()
self.fasta_file_no_consensus.close()
self.fasta_file_for_consensus_tie_G_C.close()
self.fasta_file_for_consensus_unequal_length.close()
remove_files([self.mapping_fp_name,
self.fasta_file_no_consensus_name,
self.fasta_file_for_consensus_tie_G_C_name,
self.fasta_file_for_consensus_unequal_length_name,
self.fwd_read_fh_name, self.rev_read_fh_name])
def test_select_unique_rand_bcs(self):
actual = select_unique_rand_bcs(self.fasta_seqs_of_rand_bcs, 0.86)
expected = set(['ATTGCATTGCATTGCATTGC', 'ATTGCTTATTGCATTGCTTT'])
self.assertEqual(actual, expected)
def test_get_consensus(self):
actual = get_consensus(self.fasta_file_for_consensus_tie_G_C, 2)
        # At the last position, G and C have the same frequency, so the
        # function is expected to return the consensus sequence ending in
        # G, since G occurs in the sequences that appear the most times
        # (counts 10 and 10), while C occurs in sequences with counts
        # (9, 6, 5). If there is still a tie, the function returns the
        # base that appeared first; this is just a consistent way to
        # resolve ties.
expected = 'ATTTTATTTTATTTTTATTTATTATATATTATATATATATAGCGCGCGCGCGCGG'
self.assertEqual(actual, expected)
# Sequences having unequal length:
with self.assertRaises(SeqLengthMismatchError):
get_consensus(self.fasta_file_for_consensus_unequal_length, 2)
fasta_file_no_consensus = self.fasta_file_no_consensus
with self.assertRaises(LowConsensusScoreError):
get_consensus(fasta_file_no_consensus, 6.6)
def test_get_cluster_ratio(self):
actual = get_cluster_ratio(
self.fasta_seqs_for_cluster_ratio,
self.min_difference_in_clusters)
expected = 2.5
self.assertEqual(actual, expected)
def test_extract_primers(self):
actual = extract_primer(
self.fasta_seq_for_primer, self.possible_primers)
expected = ('A', 'ATGC', 'CCCC')
self.assertEqual(actual, expected)
with self.assertRaises(PrimerMismatchError):
extract_primer(self.fasta_seq_for_primer, self.false_primers)
def test_get_LEA_seq_consensus_seqs(self):
barcode_type = int(7)
barcode_len = 7
barcode_correction_fn = None
max_barcode_errors = 1.5
min_consensus = 0.66
max_cluster_ratio = 2.5
min_difference_in_bcs = 0.86
fwd_length = 19
rev_length = 19
min_reads_per_random_bc = 1
min_diff_in_clusters = self.min_difference_in_clusters
barcode_column = 'BarcodeSequence'
reverse_primer_column = 'ReversePrimer'
function_call, _ = get_LEA_seq_consensus_seqs(self.fwd_read_fh,
self.rev_read_fh,
self.mapping_fp,
self.temp_dir,
barcode_type,
barcode_len,
barcode_correction_fn,
max_barcode_errors,
min_consensus,
max_cluster_ratio,
min_difference_in_bcs,
fwd_length,
rev_length,
min_reads_per_random_bc,
min_diff_in_clusters,
barcode_column,
reverse_primer_column)
actual = function_call['Sample1']['AGCTACGAGCTATTGC']
expected = 'AAAAAAAAAAAAAAAAAAA^AAAAAAAAAAAAAAAAAA'
self.assertEqual(actual, expected)
# this call tests the second condition of if loop
# in the function get_consensus_seq_lookup
# i.e. select the majority sequence, as the cluster ratio
# between max_cluster/second_best_cluster in the fwd_read_data
# (and rev_read_data) is 3/1 > 2.5,
# so the function get_consensus will not be called
fn_call, _ = get_LEA_seq_consensus_seqs(self.get_cons_fwd_read_fh,
self.get_cons_rev_read_fh,
self.get_cons_mapping_fp,
self.temp_dir,
barcode_type,
barcode_len,
barcode_correction_fn,
max_barcode_errors,
min_consensus,
max_cluster_ratio,
min_difference_in_bcs,
fwd_length,
rev_length,
min_reads_per_random_bc,
min_diff_in_clusters,
barcode_column,
reverse_primer_column)
get_cons_actual = fn_call['Sample1']['AGCTACGAGCTATTGC']
get_cons_expected = 'AAAAAAAAAACAAAAAAAA^AAAAAAAAAATAAAAATA'
self.assertEqual(get_cons_actual, get_cons_expected)
# this call tests the third condition of if loop
# in the function get_consensus_seq_lookup.
# i.e. calls the get_consensus function, as the cluster ratio
# between max_cluster/second_best_cluster in the get_cons_fwd_read_data
# (and get_cons_rev_read_data) is 2/1 ( < 2.5)
# so the majority sequence will not be selected
get_cons_actual = fn_call['Sample2']['AGCTACGCATCAAGGG']
get_cons_expected = 'AAAAAAAAAATAAAAAAAA^TTAAAAAAAAAAAAGAAAA'
self.assertEqual(get_cons_actual, get_cons_expected)
self.assertFalse(len(fn_call) <= 1,
msg="The get_consensus_seqs_lookup function "
"has returned early, without completing "
"the three 'for' loops.")
def test_format_lea_seq_log(self):
actual = format_lea_seq_log(1, 2, 3, 4, 5, 6)
expected = """Quality filter results
Total number of input sequences: 1
Barcode not in mapping file: 3
Sequence shorter than threshold: 5
Barcode errors exceeds limit: 2
Primer mismatch count: 4
Total number seqs written: 6"""
self.assertEqual(actual, expected)
def test_process_mapping_file(self):
barcode_type = int(7)
barcode_len = 7
barcode_column = 'BarcodeSequence'
reverse_primer_column = 'ReversePrimer'
actual = process_mapping_file(self.mapping_fp,
barcode_len,
barcode_type,
barcode_column,
reverse_primer_column)
bc_to_sid = ({'CCGGCAG': 'Sample1'},)
bc_to_fwd_primers = ({'CCGGCAG': {'AGAGTTTGATCCTGGCTCAG': 20}},)
bc_to_rev_primers = ({'CCGGCAG': ['GGGCCGTGTCTCAGT']},)
expected = bc_to_sid + bc_to_fwd_primers + bc_to_rev_primers
self.assertEqual(actual, expected)
def test_check_barcodes(self):
barcode_type = 'golay_12'
barcode_len = 7
bc_to_sid = {'CCGGCAG': 'Sample1'}
with self.assertRaises(InvalidGolayBarcodeError):
check_barcodes(bc_to_sid, barcode_len, barcode_type)
barcode_len = 1
with self.assertRaises(BarcodeLenMismatchError):
check_barcodes(bc_to_sid, barcode_len, barcode_type)
def test_read_fwd_rev_read(self):
expected_seqs_kept = 4
function_call = read_fwd_rev_read(self.fwd_read_fh,
self.rev_read_fh,
self.bc_to_sid,
self.barcode_len,
self.barcode_correction_fn,
self.bc_to_fwd_primers,
self.bc_to_rev_primers,
self.max_barcode_errors,
self.fwd_length,
self.rev_length)
actual_seqs_kept = function_call[-1]
self.assertEqual(actual_seqs_kept, expected_seqs_kept)
def test_get_consensus_seqs_lookup(self):
fn_call_fwd_rev_read = read_fwd_rev_read(self.fwd_read_fh,
self.rev_read_fh,
self.bc_to_sid,
self.barcode_len,
self.barcode_correction_fn,
self.bc_to_fwd_primers,
self.bc_to_rev_primers,
self.max_barcode_errors,
self.fwd_length,
self.rev_length)
random_bc_lookup = fn_call_fwd_rev_read[0]
random_bc_reads = fn_call_fwd_rev_read[1]
random_bcs = fn_call_fwd_rev_read[2]
min_difference_bcs = self.min_difference_in_bcs
min_diff_clusters = self.min_difference_in_clusters
min_reads_rand_bc = self.min_reads_per_random_bc
max_cluster_ratio = self.max_cluster_ratio
output_dir = self.temp_dir
fn_call_get_consensus = get_consensus_seqs_lookup(random_bc_lookup,
random_bc_reads,
random_bcs,
min_difference_bcs,
min_reads_rand_bc,
output_dir,
min_diff_clusters,
max_cluster_ratio,
min_consensus)
actual = fn_call_get_consensus['Sample1']['AGCTACGAGCTATTGC']
expected = 'AAAAAAAAAAAAAAAAAAA^AAAAAAAAAAAAAAAAAA'
self.assertEqual(actual, expected)
fasta_seqs_for_cluster_ratio = """>1abc|1
ATTTTATTTTATTTTTATTTATTATATATTATATATATATAGCGCGCGCGCGCGG
>2abc|1
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>3abc|1
ATTTTATTTTATTTTTATTTATTATATATTATATATATATAGCGCGCGCGCGCGG
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>4abc|1
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>5abc|1
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>6abc|1
ATTTTATTTTATTTTTATTTATTATATATTATATATATATAGCGCGCGCGCGCGG
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>7abc|1
ATTTTATTTTATTTTTATTTATTATATATTATATATATATAGCGCGCGCGCGCGG
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>8abc|1
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>9abc|1
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>10abc|1
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>11abc|1
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>12abc|1
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>13abc|1
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
>14abc|1
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
GGTCGGTCGTGCGTGCTCGTCGTGCTCGTCGTCGTCGCTCGTCGTCGCTGCTCTC
"""
fasta_seqs_for_consensus = """>1id1|1
ATGCATGG
>2id2|14
ATGCATGC
"""
fasta_seqs_for_consensus_unequal_length = """>1id1|1
ATGCATGG
>2id2|14
ATGCATGCT
"""
fasta_seqs_for_consensus_tie_G_C = """>abc1|10
ATTTTATTTTATTTTTATTTATTATATATTATATATATATAGCGCGCGCGCGCGG
>abc1|9
ATTTTATGGGCGGCGCGCCGCGCGCGCATTATATATATATAGCGCGCGCGCGCGC
>abc1|5
ATTTTATTTTATTTTTATTTATTATATATTATATATATATAGCGCGCGCGCGCGC
>abc1|10
ATTTTATTTTATTTTTATTTATTATATATTATATATATATAGCGCGCGCGCGCGG
>abc1|6
ATTTTATTTTATTTTTATTTATTATATATTATATATATATAGCGCGCGCGCGCGC
"""
fasta_seqs_of_rand_bcs = [
'ATTGCATTGCATTGCATTGC',
'ATTGCATTGCATTGCATTGC',
'ATTGCATTGCATTGCATTG',
'ATTGCTTATTGCATTGCTTT']
fwd_read_data = """@1____
AGCTACGAGCTATTGCAGAGTTTGATCCTGGCTCAGAAAAAAAAAAAAAAAAAAACCGGCAG
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@2____
AGCTACGAGCTATTGCAGAGTTTGATCCTGGCTCAGAAAAAAAAAAAAAAAAAAACCGGCAG
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@3____
AGCTACGAGCTATTGCAGAGTTTGATCCTGGCTCAGAAAAAAAAAAAAAAAAAAACCGGCAG
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@4____
AGCTACGAGCTATTGCAGAGTTTGATCCTGGCTCAGAAAAAAAAAAATTAAAAAACCGGCAG
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""
rev_read_data = """@1____
CCGGCAGAGCTACGAGCTATTGCGGGCCGTGTCTCAGTAAAAAAAAAAAAAAAAAA
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@2____
CCGGCAGAGCTACGAGCTATTGCGGGCCGTGTCTCAGTAAAAAAAAAAAAAAAAAA
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@3____
CCGGCAGAGCTACGAGCTATTGCGGGCCGTGTCTCAGTAAAAAAAAAAAAAAAAAA
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@4____
CCGGCAGAGCTACGAGCTATTGCGGGCCGTGTCTCAGTAAAAAAAAAAAAAAACCA
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""
mapping_data = """"#SampleID BarcodeSequence LinkerPrimerSequence ReversePrimer Description
Sample1 CCGGCAG AGAGTTTGATCCTGGCTCAG GGGCCGTGTCTCAGT Sample1 description"""
get_cons_fwd_read_data = """@1____
AGCTACGCATCAAGGGTTTTTTTTTTTTTTTTTTTTAAAAAAAAAAGAAAAAAAACCAACAG
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@2____
AGCTACGCATCAAGGGTTTTTTTTTTTTTTTTTTTTAAAAAAAAAATAAAAAAAACCAACAG
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@3____
AGCTACGAGCTATTGCAGAGTTTGATCCTGGCTCAGAAAAAAAAAACAAAAAAAACCGGCAG
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@4____
AGCTACGAGCTATTGCAGAGTTTGATCCTGGCTCAGAAAAAAAAAACAAAAAAAACCGGCAG
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@5____
AGCTACGAGCTATTGCTTTTTTTTTTTTTTTTTTTTAAAAAAAAAAGAAAAAAAACCGGCAG
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@6____
AGCTACGAGCTATTGCTTTTTTTTTTTTTTTTTTTTAAAAAAAAAATAAAAAAAACCGGCAG
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""
get_cons_rev_read_data = """@1____
CCAACAGAGCTACGAGCTATTTTTTTTTTTTTTTTTAAAAAAAAAAAAGAAAAAAA
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@2____
CCAACAGAGCTACGAGCTATTTTTTTTTTTTTTTTTAAAAAAAAAAAAGAAAAAAA
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@3____
CCGGCAGAGCTACGAGCTATTGCGGGCCGTGTCTCAGTAAAAAAAAAATAAAAACA
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@4____
CCGGCAGAGCTACGAGCTATTGCGGGCCGTGTCTCAGTAAAAAAAAAAAAAAAATA
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@5____
CCGGCAGAGCTACGAGCTATTTTTTTTTTTTTTTTAAAAAAAAAAAATAAAAACAA
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@6____
CCGGCAGAGCTACGAGCTATTTTTTTTTTTTTTTTAAAAAAAAAAAAAAAAAATAA
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""
get_cons_mapping_data = """"#SampleID BarcodeSequence LinkerPrimerSequence ReversePrimer Description
Sample1 CCGGCAG AGAGTTTGATCCTGGCTCAG GGGCCGTGTCTCAGT Sample1 description
Sample2 CCAACAG TTTTTTTTTTTTTTTTTTTT TTTTTTTTTTTTTTT Sample2 description"""
# breakdown of get_cons_fwd_read_data = """@1____
# for testing:
# AGCTACGCATCAAGGG random barcode sequence 1-16
# AGAGTTTGATCCTGGCTCAG Linker Primer sequence 17 - 36
# AAAAAAAAAAGAAAAAAAA sequence 37 - 55
# CCGGCAG BarcodeSequence 56 - 63
barcode_type = int(7)
barcode_len = 7
barcode_correction_fn = None
max_barcode_errors = 1.5
min_consensus = 0.66
max_cluster_ratio = 2.5
min_difference_in_bcs = 0.86
fwd_length = 19
rev_length = 19
min_reads_per_random_bc = 1
barcode_column = 'BarcodeSequence'
reverse_primer_column = 'ReversePrimer'
bc_to_sid = {'CCGGCAG': 'Sample1'}
bc_to_fwd_primers = {'CCGGCAG': {'AGAGTTTGATCCTGGCTCAG': 20}}
bc_to_rev_primers = {'CCGGCAG': ['GGGCCGTGTCTCAGT']}
seqs_with_no_consensus = """>id1|1
ATGC
>id2|1
TGCA
>id3|1
GCAT
>id4|1
CATG"""
min_difference_in_clusters = 0.98
fasta_seq_for_primer = 'AATGCCCCC'
possible_primers = ['ATGC', 'ATTT']
false_primers = ['AAAA']
# run tests if called from command line
if __name__ == '__main__':
main()
| gpl-2.0 |
mwv/scikit-learn | examples/classification/plot_lda_qda.py | 164 | 4806 | """
====================================================================
Linear and Quadratic Discriminant Analysis with confidence ellipsoid
====================================================================
Plot the confidence ellipsoids of each class and decision boundary
"""
print(__doc__)
from scipy import linalg
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
from matplotlib import colors
from sklearn.lda import LDA
from sklearn.qda import QDA
###############################################################################
# colormap
cmap = colors.LinearSegmentedColormap(
'red_blue_classes',
{'red': [(0, 1, 1), (1, 0.7, 0.7)],
'green': [(0, 0.7, 0.7), (1, 0.7, 0.7)],
'blue': [(0, 0.7, 0.7), (1, 1, 1)]})
plt.cm.register_cmap(cmap=cmap)
###############################################################################
# generate datasets
def dataset_fixed_cov():
'''Generate 2 Gaussians samples with the same covariance matrix'''
n, dim = 300, 2
np.random.seed(0)
C = np.array([[0., -0.23], [0.83, .23]])
X = np.r_[np.dot(np.random.randn(n, dim), C),
np.dot(np.random.randn(n, dim), C) + np.array([1, 1])]
y = np.hstack((np.zeros(n), np.ones(n)))
return X, y
def dataset_cov():
'''Generate 2 Gaussians samples with different covariance matrices'''
n, dim = 300, 2
np.random.seed(0)
C = np.array([[0., -1.], [2.5, .7]]) * 2.
X = np.r_[np.dot(np.random.randn(n, dim), C),
np.dot(np.random.randn(n, dim), C.T) + np.array([1, 4])]
y = np.hstack((np.zeros(n), np.ones(n)))
return X, y
###############################################################################
# plot functions
def plot_data(lda, X, y, y_pred, fig_index):
splot = plt.subplot(2, 2, fig_index)
if fig_index == 1:
plt.title('Linear Discriminant Analysis')
plt.ylabel('Data with fixed covariance')
elif fig_index == 2:
plt.title('Quadratic Discriminant Analysis')
elif fig_index == 3:
plt.ylabel('Data with varying covariances')
tp = (y == y_pred) # True Positive
tp0, tp1 = tp[y == 0], tp[y == 1]
X0, X1 = X[y == 0], X[y == 1]
X0_tp, X0_fp = X0[tp0], X0[~tp0]
X1_tp, X1_fp = X1[tp1], X1[~tp1]
xmin, xmax = X[:, 0].min(), X[:, 0].max()
ymin, ymax = X[:, 1].min(), X[:, 1].max()
# class 0: dots
plt.plot(X0_tp[:, 0], X0_tp[:, 1], 'o', color='red')
plt.plot(X0_fp[:, 0], X0_fp[:, 1], '.', color='#990000') # dark red
# class 1: dots
plt.plot(X1_tp[:, 0], X1_tp[:, 1], 'o', color='blue')
plt.plot(X1_fp[:, 0], X1_fp[:, 1], '.', color='#000099') # dark blue
# class 0 and 1 : areas
nx, ny = 200, 100
x_min, x_max = plt.xlim()
y_min, y_max = plt.ylim()
xx, yy = np.meshgrid(np.linspace(x_min, x_max, nx),
np.linspace(y_min, y_max, ny))
Z = lda.predict_proba(np.c_[xx.ravel(), yy.ravel()])
Z = Z[:, 1].reshape(xx.shape)
plt.pcolormesh(xx, yy, Z, cmap='red_blue_classes',
norm=colors.Normalize(0., 1.))
plt.contour(xx, yy, Z, [0.5], linewidths=2., colors='k')
# means
plt.plot(lda.means_[0][0], lda.means_[0][1],
'o', color='black', markersize=10)
plt.plot(lda.means_[1][0], lda.means_[1][1],
'o', color='black', markersize=10)
return splot
def plot_ellipse(splot, mean, cov, color):
v, w = linalg.eigh(cov)
u = w[0] / linalg.norm(w[0])
angle = np.arctan(u[1] / u[0])
angle = 180 * angle / np.pi # convert to degrees
# filled Gaussian at 2 standard deviation
ell = mpl.patches.Ellipse(mean, 2 * v[0] ** 0.5, 2 * v[1] ** 0.5,
180 + angle, color=color)
ell.set_clip_box(splot.bbox)
ell.set_alpha(0.5)
splot.add_artist(ell)
splot.set_xticks(())
splot.set_yticks(())
def plot_lda_cov(lda, splot):
plot_ellipse(splot, lda.means_[0], lda.covariance_, 'red')
plot_ellipse(splot, lda.means_[1], lda.covariance_, 'blue')
def plot_qda_cov(qda, splot):
plot_ellipse(splot, qda.means_[0], qda.covariances_[0], 'red')
plot_ellipse(splot, qda.means_[1], qda.covariances_[1], 'blue')
###############################################################################
for i, (X, y) in enumerate([dataset_fixed_cov(), dataset_cov()]):
# LDA
lda = LDA(solver="svd", store_covariance=True)
y_pred = lda.fit(X, y).predict(X)
splot = plot_data(lda, X, y, y_pred, fig_index=2 * i + 1)
plot_lda_cov(lda, splot)
plt.axis('tight')
# QDA
qda = QDA()
y_pred = qda.fit(X, y, store_covariances=True).predict(X)
splot = plot_data(qda, X, y, y_pred, fig_index=2 * i + 2)
plot_qda_cov(qda, splot)
plt.axis('tight')
plt.suptitle('LDA vs QDA')
plt.show()
| bsd-3-clause |
terrycojones/dark-matter | bin/sam-to-fasta-alignment.py | 2 | 3449 | #!/usr/bin/env python
"""
Extract aligned (i.e., padded) queries in FASTA format from a SAM/BAM file.
"""
from __future__ import division, print_function
import sys
import argparse
from dark.filter import (
addFASTAFilteringCommandLineOptions, parseFASTAFilteringCommandLineOptions)
from dark.reads import Reads
from dark.sam import SAMFilter, PaddedSAM
from dark.utils import nucleotidesToStr
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description='Produce aligned FASTA queries from a SAM/BAM file.')
parser.add_argument(
'--rcSuffix', default='',
help=('A string to add to the end of query names that are reverse '
'complemented. This is added before the /1, /2, etc., that are '
'added for duplicated ids (if there are duplicates and '
'--allowDuplicateIds is not used)'))
parser.add_argument(
'--rcNeeded', default=False, action='store_true',
help=('If given, queries that are flagged as matching when reverse '
'complemented will be reverse complemented in the output. This '
'must be used if the program that created the SAM/BAM input '
'flags reversed matches but does not also store the reverse '
'complemented query. The bwa program (mem and aln followed by '
'samse) stores the queries reversed complemented if the match '
'was, so this option is not needed for bwa. If in doubt, test the '
'output of your matching program as this is very important!'))
parser.add_argument(
'--listReferenceInsertions', default=False, action='store_true',
help=('If given, information about reference sequence insertions will be '
'printed to standard error. These correspond to "I" CIGAR '
'operations that for the match would require inserting query bases '
'into the reference. Because we cannot change the reference (in '
'fact we typically do not have the reference in the SAM/BAM file), '
'we cut the inserted bases out of the aligned query and save the '
'information about what would have been inserted and where. That '
'information is printed by this option. The output gives the '
'0-based offset where the inserted base would be placed, followed '
'by a list of the nucleotides that were suggested as being '
'inserted and the number of times each nucleotide was suggested. '
'So for example the output might contain "27: T:3, G:10" which '
'indicates that 13 query (3 with T and 10 with G) matches would '
'insert a nucleotide into the reference at offset 27.'))
SAMFilter.addFilteringOptions(parser)
addFASTAFilteringCommandLineOptions(parser)
args = parser.parse_args()
reads = parseFASTAFilteringCommandLineOptions(args, Reads())
samFilter = SAMFilter.parseFilteringOptions(
args, filterRead=reads.filterRead)
paddedSAM = PaddedSAM(samFilter)
for read in paddedSAM.queries(rcSuffix=args.rcSuffix, rcNeeded=args.rcNeeded):
print(read.toString('fasta'), end='')
if args.listReferenceInsertions:
if paddedSAM.referenceInsertions:
print('(0-based) insertions into the reference:\n%s' %
nucleotidesToStr(paddedSAM.referenceInsertions, ' '),
file=sys.stderr)
else:
print('No matches required an insertion into the reference.',
file=sys.stderr)
| mit |
mmbtba/odoo | addons/hr_payroll/report/report_payslip.py | 377 | 1982 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# d$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.report import report_sxw
class payslip_report(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(payslip_report, self).__init__(cr, uid, name, context)
self.localcontext.update({
'get_payslip_lines': self.get_payslip_lines,
})
def get_payslip_lines(self, obj):
payslip_line = self.pool.get('hr.payslip.line')
res = []
        # keep only the lines flagged to appear on the payslip
        ids = [line.id for line in obj if line.appears_on_payslip]
if ids:
res = payslip_line.browse(self.cr, self.uid, ids)
return res
class wrapped_report_payslip(osv.AbstractModel):
_name = 'report.hr_payroll.report_payslip'
_inherit = 'report.abstract_report'
_template = 'hr_payroll.report_payslip'
_wrapped_report_class = payslip_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mmbtba/odoo | addons/l10n_jp/__openerp__.py | 220 | 1902 | # -*- coding: utf-8 -*-
# Copyright (C) Rooms For (Hong Kong) Limited T/A OSCG
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
{
'name': 'Japan - Accounting',
'version': '1.2',
'category': 'Localization/Account Charts',
'description': """
Overview:
---------
* Chart of Accounts and Taxes template for companies in Japan.
* This probably does not cover all the necessary accounts for a company. \
You are expected to add/delete/modify accounts based on this template.
Note:
-----
* Fiscal positions '内税' and '外税' have been added to handle special \
requirements which might arise from POS implementation. [1] You may not \
need to use these at all under normal circumstances.
[1] See https://github.com/odoo/odoo/pull/6470 for detail.
""",
'author': 'Rooms For (Hong Kong) Limited T/A OSCG',
'website': 'http://www.openerp-asia.net/',
'depends': ['account_chart'],
'data': [
'data/account.account.template.csv',
'data/account.tax.code.template.csv',
'data/account.chart.template.csv',
'data/account.tax.template.csv',
'data/account.fiscal.position.template.csv',
],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
elainenaomi/sciwonc-dataflow-examples | dissertation2017/Experiment 1B/instances/11_1_workflow_full_10files_secondary_w1_3sh_3rs_with_annot_with_proj_3s_hash/taskevent_2/TaskEvent_2.py | 37 | 6282 | #!/usr/bin/env python
"""
Computes per event type:
- how many tasks are per event?
- which is the average and standard deviation of CPU request per event?
- which is the average and standard deviation of Memory request per event?
"""
# It will connect to DataStoreClient
from sciwonc.dataflow.DataStoreClient import DataStoreClient
import sys
##################################################################
import ConfigDB_Task_StatsCPUMemory_2
client_averagecpu = DataStoreClient("mongodb", ConfigDB_Task_StatsCPUMemory_2)
data_averagecpu = client_averagecpu.getData()
if data_averagecpu:
while True:
doc = data_averagecpu.next()
if doc is None:
            break
sd_cpu = doc['standard deviation cpu']
avg_cpu = doc['average cpu']
sd_memory = doc['standard deviation memory']
avg_memory = doc['average memory']
sd_ratio = doc['standard deviation ratio']
avg_ratio = doc['average ratio']
##################################################################
import ConfigDB_Task_GeneralInfo_2
client_geninfo = DataStoreClient("mongodb", ConfigDB_Task_GeneralInfo_2)
data_geninfo = client_geninfo.getData()
if data_geninfo:
while True:
doc = data_geninfo.next()
if doc is None:
            break
print doc
total_items = doc['total items']
##################################################################
import ConfigDB_TaskEvent_2
import math
# connector and config
config = ConfigDB_TaskEvent_2
client = DataStoreClient("mongodb", config)
# according to config
eventList = client.getData() # return an array of docs (like a csv reader)
info = {}
if(eventList):
for index in eventList:
# sum_ratio = 0
total_valid_tasks = 0
total_tasks = 0
event_type = index[config.COLUMN]
sum_memory = 0
sum_cpu = 0
sum_ratio = 0
while True:
doc = index['data'].next()
if doc is None:
                break
total_tasks += 1
if(doc['memory request']):
sum_memory = sum_memory + float(doc['memory request'])
if(doc['CPU request']):
sum_cpu = sum_cpu + float(doc['CPU request'])
if(doc['CPU request']) and (doc['memory request']):
ratio = float(doc['CPU request'])/ float(doc['memory request']) if float(doc['memory request']) > 0 else 0
sum_ratio = sum_ratio + ratio
total_valid_tasks += 1
# average
average_memory = sum_memory / total_valid_tasks if total_valid_tasks > 0 else 0
average_cpu = sum_cpu / total_valid_tasks if total_valid_tasks > 0 else 0
average_ratio = sum_ratio / total_valid_tasks if total_valid_tasks > 0 else 0
event_info = {}
event_info['event type'] = event_type
event_info['sum memory'] = sum_memory
event_info['sum cpu'] = sum_cpu
event_info['sum ratio'] = sum_ratio
event_info['total valid tasks'] = total_valid_tasks
event_info['total tasks'] = total_tasks
event_info['average memory'] = average_memory
event_info['average cpu'] = average_cpu
event_info['average ratio'] = average_ratio
info[event_type] = event_info
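# Second pass: re-read the same partitions and accumulate squared deviations
# from the per-event averages computed above, yielding per-event variance and
# standard deviation.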
# according to config
eventList = client.getData() # return an array of docs (like a csv reader)
output = []
if(eventList):
for index in eventList:
event_type = index[config.COLUMN]
total_variance_memory = 0
total_variance_cpu = 0
total_variance_ratio = 0
average_memory = info[event_type]['average memory']
average_cpu = info[event_type]['average cpu']
average_ratio = info[event_type]['average ratio']
while True:
doc = index['data'].next()
if doc is None:
                break
if(doc['memory request'] and doc['CPU request']):
total_variance_memory = total_variance_memory + (float(doc['memory request']) - average_memory) ** 2
total_variance_cpu = total_variance_cpu + (float(doc['CPU request']) - average_cpu) ** 2
ratio = float(doc['CPU request']) / float(doc['memory request']) if float(doc['memory request']) > 0 else 0
total_variance_ratio = total_variance_ratio + (ratio - average_ratio) ** 2
        # Use the per-event count computed in the first pass.
        total_valid_tasks = info[event_type]['total valid tasks']
        if total_valid_tasks is None:
            total_valid_tasks = 0
newline = {}
newline['event type'] = event_type
newline['sum memory'] = info[event_type]['sum memory']
newline['sum cpu'] = info[event_type]['sum cpu']
newline['sum ratio'] = info[event_type]['sum ratio']
newline['total valid tasks'] = info[event_type]['total valid tasks']
newline['total tasks'] = info[event_type]['total tasks']
newline['average memory'] = average_memory
newline['average cpu'] = average_cpu
newline['average ratio'] = average_ratio
newline['variance memory'] = total_variance_memory/total_valid_tasks if total_valid_tasks > 0 else None
newline['variance cpu'] = total_variance_cpu/total_valid_tasks if total_valid_tasks > 0 else None
newline['variance ratio'] = total_variance_ratio/total_valid_tasks if total_valid_tasks > 0 else None
newline['standard deviation memory'] = math.sqrt(total_variance_memory/total_valid_tasks) if total_valid_tasks > 0 else None
newline['standard deviation cpu'] = math.sqrt(total_variance_cpu/total_valid_tasks) if total_valid_tasks > 0 else None
newline['standard deviation ratio'] = math.sqrt(total_variance_ratio/total_valid_tasks) if total_valid_tasks > 0 else None
        # Share of all items accounted for by this event's valid tasks.
        newline['percentage from total'] = total_valid_tasks * 100.0 / total_items if total_items > 0 else None
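        # "sds from avg *": how many global standard deviations this event's
        # average lies away from the global average (a z-score).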
newline['sds from avg cpu'] = (average_cpu - avg_cpu)/sd_cpu if sd_cpu is not None else None
newline['sds from avg memory'] = (average_memory - avg_memory)/sd_memory if sd_memory is not None else None
newline['sds from avg ratio'] = (average_ratio - avg_ratio)/sd_ratio if sd_ratio is not None else None
output.append(newline)
# save
client.saveData(output)
| gpl-3.0 |
crosswalk-project/chromium-crosswalk-efl | tools/perf/page_sets/page_cycler/morejs.py | 34 | 1434 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class MorejsPage(page_module.Page):
def __init__(self, url, page_set):
super(MorejsPage, self).__init__(url=url, page_set=page_set)
class MorejsPageSet(page_set_module.PageSet):
""" More JS page_cycler benchmark """
def __init__(self):
super(MorejsPageSet, self).__init__(
# pylint: disable=C0301
serving_dirs=set(['../../../../data/page_cycler/morejs']),
bucket=page_set_module.PARTNER_BUCKET)
urls_list = [
'file://../../../../data/page_cycler/morejs/blog.chromium.org/',
'file://../../../../data/page_cycler/morejs/dev.chromium.org/',
'file://../../../../data/page_cycler/morejs/googleblog.blogspot.com1/',
'file://../../../../data/page_cycler/morejs/googleblog.blogspot.com2/',
'file://../../../../data/page_cycler/morejs/test.blogspot.com/',
'file://../../../../data/page_cycler/morejs/www.igoogle.com/',
'file://../../../../data/page_cycler/morejs/www.techcrunch.com/',
'file://../../../../data/page_cycler/morejs/www.webkit.org/',
'file://../../../../data/page_cycler/morejs/www.yahoo.com/'
]
for url in urls_list:
self.AddPage(MorejsPage(url, self))
| bsd-3-clause |
rooi/CouchPotatoServer | couchpotato/core/media/_base/searcher/base.py | 10 | 1312 | from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
log = CPLog(__name__)
class SearcherBase(Plugin):
in_progress = False
def __init__(self):
super(SearcherBase, self).__init__()
addEvent('searcher.progress', self.getProgress)
addEvent('%s.searcher.progress' % self.getType(), self.getProgress)
self.initCron()
def initCron(self):
""" Set the searcher cronjob
Make sure to reset cronjob after setting has changed
"""
_type = self.getType()
def setCrons():
fireEvent('schedule.cron', '%s.searcher.all' % _type, self.searchAll,
day = self.conf('cron_day'), hour = self.conf('cron_hour'), minute = self.conf('cron_minute'))
addEvent('app.load', setCrons)
addEvent('setting.save.%s_searcher.cron_day.after' % _type, setCrons)
addEvent('setting.save.%s_searcher.cron_hour.after' % _type, setCrons)
addEvent('setting.save.%s_searcher.cron_minute.after' % _type, setCrons)
def getProgress(self, **kwargs):
""" Return progress of current searcher"""
progress = {
self.getType(): self.in_progress
}
return progress
| gpl-3.0 |
zorbathut/frames | deps/freetype-2.5.3/src/src/tools/docmaker/formatter.py | 515 | 4962 | # Formatter (c) 2002, 2004, 2007, 2008 David Turner <david@freetype.org>
#
from sources import *
from content import *
from utils import *
# This is the base Formatter class. Its purpose is to convert
# a content processor's data into specific documents (i.e., table of
# contents, global index, and individual API reference indices).
#
# You need to sub-class it to output anything sensible. For example,
# the file tohtml.py contains the definition of the HtmlFormatter sub-class
# used to output -- you guessed it -- HTML.
#
class Formatter:
def __init__( self, processor ):
self.processor = processor
self.identifiers = {}
self.chapters = processor.chapters
self.sections = processor.sections.values()
self.block_index = []
# store all blocks in a dictionary
self.blocks = []
for section in self.sections:
for block in section.blocks.values():
self.add_identifier( block.name, block )
# add enumeration values to the index, since this is useful
for markup in block.markups:
if markup.tag == 'values':
for field in markup.fields:
self.add_identifier( field.name, block )
self.block_index = self.identifiers.keys()
self.block_index.sort( index_sort )
def add_identifier( self, name, block ):
if self.identifiers.has_key( name ):
# duplicate name!
sys.stderr.write( \
"WARNING: duplicate definition for '" + name + "' in " + \
block.location() + ", previous definition in " + \
self.identifiers[name].location() + "\n" )
else:
self.identifiers[name] = block
#
# Formatting the table of contents
#
def toc_enter( self ):
pass
def toc_chapter_enter( self, chapter ):
pass
def toc_section_enter( self, section ):
pass
def toc_section_exit( self, section ):
pass
def toc_chapter_exit( self, chapter ):
pass
def toc_index( self, index_filename ):
pass
def toc_exit( self ):
pass
def toc_dump( self, toc_filename = None, index_filename = None ):
output = None
if toc_filename:
output = open_output( toc_filename )
self.toc_enter()
for chap in self.processor.chapters:
self.toc_chapter_enter( chap )
for section in chap.sections:
self.toc_section_enter( section )
self.toc_section_exit( section )
self.toc_chapter_exit( chap )
self.toc_index( index_filename )
self.toc_exit()
if output:
close_output( output )
#
# Formatting the index
#
def index_enter( self ):
pass
def index_name_enter( self, name ):
pass
def index_name_exit( self, name ):
pass
def index_exit( self ):
pass
def index_dump( self, index_filename = None ):
output = None
if index_filename:
output = open_output( index_filename )
self.index_enter()
for name in self.block_index:
self.index_name_enter( name )
self.index_name_exit( name )
self.index_exit()
if output:
close_output( output )
#
# Formatting a section
#
def section_enter( self, section ):
pass
def block_enter( self, block ):
pass
def markup_enter( self, markup, block = None ):
pass
def field_enter( self, field, markup = None, block = None ):
pass
def field_exit( self, field, markup = None, block = None ):
pass
def markup_exit( self, markup, block = None ):
pass
def block_exit( self, block ):
pass
def section_exit( self, section ):
pass
def section_dump( self, section, section_filename = None ):
output = None
if section_filename:
output = open_output( section_filename )
self.section_enter( section )
for name in section.block_names:
block = self.identifiers[name]
self.block_enter( block )
for markup in block.markups[1:]: # always ignore first markup!
self.markup_enter( markup, block )
for field in markup.fields:
self.field_enter( field, markup, block )
self.field_exit( field, markup, block )
self.markup_exit( markup, block )
self.block_exit( block )
self.section_exit( section )
if output:
close_output( output )
def section_dump_all( self ):
for section in self.sections:
self.section_dump( section )
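
# A hypothetical minimal sub-class (for illustration only): it assumes a
# `processor` object as built by the content processor in content.py, and it
# simply prints the name of every section and block visited.
#
class PrintingFormatter( Formatter ):

    def section_enter( self, section ):
        print "section:", section.name

    def block_enter( self, block ):
        print "block:", block.name

# usage sketch: PrintingFormatter( processor ).section_dump_all()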
# eof
| gpl-3.0 |
Nicop06/ansible | lib/ansible/vars/unsafe_proxy.py | 51 | 1219 | # (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# This is backwards compat. unsafe_proxy was moved to avoid circular imports.
from ansible.utils.unsafe_proxy import * # pylint: disable=wildcard-import
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
display.deprecated('ansible.vars.unsafe_proxy is deprecated. Use ansible.utils.unsafe_proxy instead.', version='2.8')
| gpl-3.0 |
hemanthariyani/omap-kernel | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
if len(sys.argv) != 2:
print "Usage: %s FILE" % sys.argv[0]
sys.exit(2)
readelf = os.getenv("READELF", "readelf")
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
if slots != rlen_sum:
global num_errors
num_errors += 1
if not func: func = "[%#x-%#x]" % (start, end)
print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
return
num_funcs = 0
num_errors = 0
func = False
slots = 0
rlen_sum = 0
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
m = start_pattern.match(line)
if m:
check_func(func, slots, rlen_sum)
func = m.group(1)
start = long(m.group(2), 16)
end = long(m.group(3), 16)
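        # IA-64 code is packed into 16-byte bundles holding three instruction
        # slots each, hence 3 slots for every 16 bytes of function text.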
slots = 3 * (end - start) / 16
rlen_sum = 0L
num_funcs += 1
else:
m = rlen_pattern.match(line)
if m:
rlen_sum += long(m.group(1))
check_func(func, slots, rlen_sum)
if num_errors == 0:
print "No errors detected in %u functions." % num_funcs
else:
if num_errors > 1:
err="errors"
else:
err="error"
print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
sys.exit(1)
| gpl-2.0 |
PlushBeaver/FanFicFare | included_dependencies/html5lib/treebuilders/dom.py | 920 | 8469 | from __future__ import absolute_import, division, unicode_literals
from xml.dom import minidom, Node
import weakref
from . import _base
from .. import constants
from ..constants import namespaces
from ..utils import moduleFactoryFactory
def getDomBuilder(DomImplementation):
Dom = DomImplementation
class AttrList(object):
def __init__(self, element):
self.element = element
def __iter__(self):
return list(self.element.attributes.items()).__iter__()
def __setitem__(self, name, value):
self.element.setAttribute(name, value)
def __len__(self):
return len(list(self.element.attributes.items()))
def items(self):
return [(item[0], item[1]) for item in
list(self.element.attributes.items())]
def keys(self):
return list(self.element.attributes.keys())
def __getitem__(self, name):
return self.element.getAttribute(name)
def __contains__(self, name):
if isinstance(name, tuple):
raise NotImplementedError
else:
return self.element.hasAttribute(name)
class NodeBuilder(_base.Node):
def __init__(self, element):
_base.Node.__init__(self, element.nodeName)
self.element = element
namespace = property(lambda self: hasattr(self.element, "namespaceURI")
and self.element.namespaceURI or None)
def appendChild(self, node):
node.parent = self
self.element.appendChild(node.element)
def insertText(self, data, insertBefore=None):
text = self.element.ownerDocument.createTextNode(data)
if insertBefore:
self.element.insertBefore(text, insertBefore.element)
else:
self.element.appendChild(text)
def insertBefore(self, node, refNode):
self.element.insertBefore(node.element, refNode.element)
node.parent = self
def removeChild(self, node):
if node.element.parentNode == self.element:
self.element.removeChild(node.element)
node.parent = None
def reparentChildren(self, newParent):
while self.element.hasChildNodes():
child = self.element.firstChild
self.element.removeChild(child)
newParent.element.appendChild(child)
self.childNodes = []
def getAttributes(self):
return AttrList(self.element)
def setAttributes(self, attributes):
if attributes:
for name, value in list(attributes.items()):
if isinstance(name, tuple):
if name[0] is not None:
qualifiedName = (name[0] + ":" + name[1])
else:
qualifiedName = name[1]
self.element.setAttributeNS(name[2], qualifiedName,
value)
else:
self.element.setAttribute(
name, value)
attributes = property(getAttributes, setAttributes)
def cloneNode(self):
return NodeBuilder(self.element.cloneNode(False))
def hasContent(self):
return self.element.hasChildNodes()
def getNameTuple(self):
if self.namespace is None:
return namespaces["html"], self.name
else:
return self.namespace, self.name
nameTuple = property(getNameTuple)
class TreeBuilder(_base.TreeBuilder):
def documentClass(self):
self.dom = Dom.getDOMImplementation().createDocument(None, None, None)
return weakref.proxy(self)
def insertDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
domimpl = Dom.getDOMImplementation()
doctype = domimpl.createDocumentType(name, publicId, systemId)
self.document.appendChild(NodeBuilder(doctype))
if Dom == minidom:
doctype.ownerDocument = self.dom
def elementClass(self, name, namespace=None):
if namespace is None and self.defaultNamespace is None:
node = self.dom.createElement(name)
else:
node = self.dom.createElementNS(namespace, name)
return NodeBuilder(node)
def commentClass(self, data):
return NodeBuilder(self.dom.createComment(data))
def fragmentClass(self):
return NodeBuilder(self.dom.createDocumentFragment())
def appendChild(self, node):
self.dom.appendChild(node.element)
def testSerializer(self, element):
return testSerializer(element)
def getDocument(self):
return self.dom
def getFragment(self):
return _base.TreeBuilder.getFragment(self).element
def insertText(self, data, parent=None):
data = data
if parent != self:
_base.TreeBuilder.insertText(self, data, parent)
else:
# HACK: allow text nodes as children of the document node
if hasattr(self.dom, '_child_node_types'):
if Node.TEXT_NODE not in self.dom._child_node_types:
self.dom._child_node_types = list(self.dom._child_node_types)
self.dom._child_node_types.append(Node.TEXT_NODE)
self.dom.appendChild(self.dom.createTextNode(data))
implementation = DomImplementation
name = None
def testSerializer(element):
element.normalize()
rv = []
def serializeElement(element, indent=0):
if element.nodeType == Node.DOCUMENT_TYPE_NODE:
if element.name:
if element.publicId or element.systemId:
publicId = element.publicId or ""
systemId = element.systemId or ""
rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" %
(' ' * indent, element.name, publicId, systemId))
else:
rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, element.name))
else:
rv.append("|%s<!DOCTYPE >" % (' ' * indent,))
elif element.nodeType == Node.DOCUMENT_NODE:
rv.append("#document")
elif element.nodeType == Node.DOCUMENT_FRAGMENT_NODE:
rv.append("#document-fragment")
elif element.nodeType == Node.COMMENT_NODE:
rv.append("|%s<!-- %s -->" % (' ' * indent, element.nodeValue))
elif element.nodeType == Node.TEXT_NODE:
rv.append("|%s\"%s\"" % (' ' * indent, element.nodeValue))
else:
if (hasattr(element, "namespaceURI") and
element.namespaceURI is not None):
name = "%s %s" % (constants.prefixes[element.namespaceURI],
element.nodeName)
else:
name = element.nodeName
rv.append("|%s<%s>" % (' ' * indent, name))
if element.hasAttributes():
attributes = []
for i in range(len(element.attributes)):
attr = element.attributes.item(i)
name = attr.nodeName
value = attr.value
ns = attr.namespaceURI
if ns:
name = "%s %s" % (constants.prefixes[ns], attr.localName)
else:
name = attr.nodeName
attributes.append((name, value))
for name, value in sorted(attributes):
rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
indent += 2
for child in element.childNodes:
serializeElement(child, indent)
serializeElement(element, 0)
return "\n".join(rv)
return locals()
# The actual means to get a module!
getDomModule = moduleFactoryFactory(getDomBuilder)
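# Typical use (a sketch): obtain a concrete builder with, e.g.,
# `getDomModule(minidom).TreeBuilder` and hand it to an html5lib parser.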
| gpl-3.0 |
hackerbot/DjangoDev | tests/template_tests/test_logging.py | 210 | 2286 | from __future__ import unicode_literals
import logging
from django.template import Engine, Variable, VariableDoesNotExist
from django.test import SimpleTestCase
class TestHandler(logging.Handler):
def __init__(self):
super(TestHandler, self).__init__()
self.log_record = None
def emit(self, record):
self.log_record = record
class VariableResolveLoggingTests(SimpleTestCase):
def setUp(self):
self.test_handler = TestHandler()
self.logger = logging.getLogger('django.template')
self.original_level = self.logger.level
self.logger.addHandler(self.test_handler)
self.logger.setLevel(logging.DEBUG)
def tearDown(self):
self.logger.removeHandler(self.test_handler)
self.logger.level = self.original_level
def test_log_on_variable_does_not_exist_silent(self):
class TestObject(object):
class SilentDoesNotExist(Exception):
silent_variable_failure = True
@property
def template_name(self):
return "template"
@property
def template(self):
return Engine().from_string('')
@property
def article(self):
raise TestObject.SilentDoesNotExist("Attribute does not exist.")
def __iter__(self):
return iter(attr for attr in dir(TestObject) if attr[:2] != "__")
def __getitem__(self, item):
return self.__dict__[item]
Variable('article').resolve(TestObject())
self.assertEqual(
self.test_handler.log_record.msg,
'template - Attribute does not exist.'
)
def test_log_on_variable_does_not_exist_not_silent(self):
with self.assertRaises(VariableDoesNotExist):
Variable('article.author').resolve({'article': {'section': 'News'}})
self.assertEqual(
self.test_handler.log_record.msg,
'unknown - Failed lookup for key [author] in %r' %
("{%r: %r}" % ('section', 'News'), )
)
def test_no_log_when_variable_exists(self):
Variable('article.section').resolve({'article': {'section': 'News'}})
self.assertIsNone(self.test_handler.log_record)
| bsd-3-clause |
sunu/oppia-test-4 | feconf.py | 1 | 4185 | # coding: utf-8
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stores various configuration options and constants for Oppia."""
import os
import jinja2
# Code contributors, in alphabetical order.
CODE_CONTRIBUTORS = [
'Jeremy Emerson',
'Koji Ashida',
'Manas Tungare',
'Reinaldo Aguiar',
'Sean Lip',
'Stephanie Federwisch',
'Wilson Hong',
'Yana Malysheva',
]
# Idea contributors, in alphabetical order.
IDEA_CONTRIBUTORS = [
'Alex Kauffmann',
'Catherine Colman',
'John Cox',
'Neil Fraser',
'Pavel Simakov',
'Peter Norvig',
'Phil Wagner',
'Philip Guo',
]
# Demo explorations to load on startup. The id assigned to each exploration
# is based on the index of the exploration in this list, so if you want to
# add a new exploration and preserve the existing ids, add that exploration
# to the end of the list.
# Each item is represented as a tuple: (filename, title, category, image_name).
# The fourth element is optional. Note that the filename omits the .yaml suffix.
# The images are in /data/images.
DEMO_EXPLORATIONS = [
('welcome', 'Welcome to Oppia!', 'Welcome'),
('pitch', 'Pitch Perfect', 'Music', 'pitch.png'),
('counting', 'Three Balls', 'Mathematics', 'counting.png'),
('boot_verbs', 'Boot Verbs', 'Languages', 'boot_verbs.png'),
('hola', '¡Hola!', 'Languages'),
('landmarks', 'Landmarks', 'Geography'),
('adventure', 'Parametrized Adventure', 'Interactive Fiction'),
]
# Whether to unconditionally log info messages.
DEBUG = False
# Whether we should serve the development or production experience.
DEV = (os.environ.get('SERVER_SOFTWARE')
and os.environ['SERVER_SOFTWARE'].startswith('Development'))
# The directory containing third-party files.
THIRD_PARTY_DIR = 'oppia/third_party'
# The directory containing data files for tests.
TESTS_DATA_DIR = 'oppia/tests/data'
# The directories containing sample classifiers, explorations and widgets.
SAMPLE_CLASSIFIERS_DIR = 'data/classifiers'
SAMPLE_EXPLORATIONS_DIR = 'data/explorations'
SAMPLE_IMAGES_DIR = 'data/images'
INTERACTIVE_WIDGETS_DIR = 'data/widgets/interactive'
NONINTERACTIVE_WIDGETS_DIR = 'data/widgets/noninteractive'
OBJECT_TEMPLATES_DIR = 'data/objects/templates'
# The jinja environment used for loading object view and edit templates.
OBJECT_JINJA_ENV = jinja2.Environment(
autoescape=True,
loader=jinja2.FileSystemLoader(os.path.join(
os.path.dirname(__file__), OBJECT_TEMPLATES_DIR))
)
# The jinja environment used for loading frontend templates.
loader = jinja2.FileSystemLoader(os.path.join(
os.path.dirname(__file__),
'oppia/templates/dev/head' if DEV else 'oppia/templates/prod/head'
))
OPPIA_JINJA_ENV = jinja2.Environment(autoescape=True, loader=loader)
def include_js_file(name):
"""Include a raw JS file in the template without evaluating it."""
assert name.endswith('.js')
return jinja2.Markup(loader.get_source(OPPIA_JINJA_ENV, name)[0])
OPPIA_JINJA_ENV.globals['include_js_file'] = include_js_file
OPPIA_JINJA_ENV.filters.update({
'is_list': lambda x: isinstance(x, list),
'is_dict': lambda x: isinstance(x, dict),
})
END_DEST = 'END'
# Default name for a state.
DEFAULT_STATE_NAME = '[untitled state]'
# Default file name for newly-created files for download.
DEFAULT_FILE_NAME = 'New file'
ACCEPTED_IMAGE_FORMATS = ['gif', 'jpeg', 'png']
# Set this to True to allow file uploads via YAML in the gallery and editor pages.
ALLOW_YAML_FILE_UPLOAD = False
# Prefixes for widget ids in the datastore.
INTERACTIVE_PREFIX = 'interactive'
NONINTERACTIVE_PREFIX = 'noninteractive'
| apache-2.0 |
slobberchops/rop | webrop.py | 1 | 3626 | from flask import Flask, Response, jsonify
import logging
logging.basicConfig(filename='web.log', level=logging.INFO)
import config
import os.path
from random import seed
from time import time
import traceback
from importer import ImportPlugins
from opc.matrix import OPCMatrix
# This code remains experimental. Configuration values are contained within this file.
M_WIDTH = 64
M_HEIGHT = 64
DFLT_FLIPTIME_SECS = 30
app = Flask(__name__)
app.config.from_object(__name__)
class Feed(object):
def __init__(self):
matrix = OPCMatrix(M_WIDTH, M_HEIGHT, "echo", fliplr=True)
arts = ImportPlugins("art", ["template.py"], [], None, matrix,
config.config)
if len(arts) == 0:
matrix.terminate()
print "Couldn't find any art to execute"
exit(1)
self.generator = self._frameGenerator(arts, matrix)
self.packet = None
def _frameGenerator(self, arts, matrix):
while True:
seed(time())
for name, art in arts.iteritems():
matrix.hq(False)
matrix.clear()
try:
art.start(matrix)
except Exception as e:
logging.info("start bork: "+ str(e))
logging.info("start bork: "+ traceback.format_exc())
continue
start_time = time()
while time()-start_time < DFLT_FLIPTIME_SECS:
cycle_time = time()
try:
art.refresh(matrix)
except Exception as e:
logging.info("refresh bork: "+ str(e))
logging.info("refresh bork: "+ traceback.format_exc())
break
elapsed = time() - cycle_time
remaining = art.interval()/1000.0 - elapsed
yield {
"interval": remaining,
"expires": time()+remaining,
"data": matrix.show(),
}
def _webHex(self, pix):
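        # Pack an (r, g, b) pixel into a six-digit hex colour string,
        # e.g. (255, 0, 0) -> 'ff0000'.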
return '{0:06x}'.format(((int(pix[0])*0x100) +
int(pix[1]))*0x100 + int(pix[2]))
def produce(self):
if self.packet is None or time() > self.packet["expires"]:
frame = self.generator.next()
data = [[self._webHex(pix) for pix in row]
for row in frame["data"]]
self.packet = {
"interval": frame["interval"],
"expires": frame["expires"],
"data": data,
}
return self.packet
def root_dir(): # pragma: no cover
return os.path.abspath(os.path.dirname(__file__))
def get_file(filename): # pragma: no cover
try:
src = os.path.join(root_dir(), filename)
return open(src).read()
except IOError as exc:
return str(exc)
@app.route('/', methods=['GET'])
def docroot():
content = get_file('index.html')
return Response(content, mimetype="text/html")
@app.route("/initialize.json")
def json_initialize():
packet = {
"xrange": M_WIDTH,
"yrange": M_HEIGHT,
}
return jsonify(packet)
@app.route("/refresh.json")
def json_refresh():
global feed
return jsonify(feed.produce())
if __name__ == "__main__":
global feed
feed = Feed()
app.run(threaded=True, debug=True)
| gpl-3.0 |
alexmandujano/django | tests/view_tests/tests/test_i18n.py | 40 | 10256 | # -*- coding:utf-8 -*-
from __future__ import absolute_import
import gettext
import os
from os import path
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase, TestCase
from django.test.utils import override_settings
from django.utils import six, unittest
from django.utils._os import upath
from django.utils.module_loading import import_by_path
from django.utils.translation import override
from django.utils.text import javascript_quote
from ..urls import locale_dir
class I18NTests(TestCase):
""" Tests django views in django/views/i18n.py """
def test_setlang(self):
"""
The set_language view can be used to change the session language.
The user is redirected to the 'next' argument if provided.
"""
for lang_code, lang_name in settings.LANGUAGES:
post_data = dict(language=lang_code, next='/views/')
response = self.client.post('/views/i18n/setlang/', data=post_data)
self.assertRedirects(response, 'http://testserver/views/')
self.assertEqual(self.client.session['django_language'], lang_code)
def test_setlang_unsafe_next(self):
"""
The set_language view only redirects to the 'next' argument if it is
"safe".
"""
lang_code, lang_name = settings.LANGUAGES[0]
post_data = dict(language=lang_code, next='//unsafe/redirection/')
response = self.client.post('/views/i18n/setlang/', data=post_data)
self.assertEqual(response.url, 'http://testserver/')
self.assertEqual(self.client.session['django_language'], lang_code)
def test_setlang_reversal(self):
self.assertEqual(reverse('set_language'), '/views/i18n/setlang/')
def test_jsi18n(self):
"""The javascript_catalog can be deployed with language settings"""
for lang_code in ['es', 'fr', 'ru']:
with override(lang_code):
catalog = gettext.translation('djangojs', locale_dir, [lang_code])
if six.PY3:
trans_txt = catalog.gettext('this is to be translated')
else:
trans_txt = catalog.ugettext('this is to be translated')
response = self.client.get('/views/jsi18n/')
# response content must include a line like:
# "this is to be translated": <value of trans_txt Python variable>
# javascript_quote is used to be able to check unicode strings
self.assertContains(response, javascript_quote(trans_txt), 1)
if lang_code == 'fr':
# Message with context (msgctxt)
self.assertContains(response, r'"month name\u0004May": "mai"', 1)
class JsI18NTests(TestCase):
"""
Tests django views in django/views/i18n.py that need to change
settings.LANGUAGE_CODE.
"""
def test_jsi18n_with_missing_en_files(self):
"""
The javascript_catalog shouldn't load the fallback language in the
case that the current selected language is actually the one translated
from, and hence missing translation files completely.
This happens easily when you're translating from English to other
languages and you've set settings.LANGUAGE_CODE to some other language
than English.
"""
with self.settings(LANGUAGE_CODE='es'):
with override('en-us'):
response = self.client.get('/views/jsi18n/')
self.assertNotContains(response, 'esto tiene que ser traducido')
def test_jsi18n_fallback_language(self):
"""
Let's make sure that the fallback language is still working properly
in cases where the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE='fr'):
with override('fi'):
response = self.client.get('/views/jsi18n/')
self.assertContains(response, 'il faut le traduire')
def testI18NLanguageNonEnglishDefault(self):
"""
Check if the Javascript i18n view returns an empty language catalog
if the default language is non-English, the selected language
        is English and there is no 'en' translation available. See #13388,
#3594 and #13726 for more details.
"""
with self.settings(LANGUAGE_CODE='fr'):
with override('en-us'):
response = self.client.get('/views/jsi18n/')
self.assertNotContains(response, 'Choisir une heure')
def test_nonenglish_default_english_userpref(self):
"""
Same as above with the difference that there IS an 'en' translation
        available. The Javascript i18n view must return a non-empty language catalog
with the proper English translations. See #13726 for more details.
"""
extended_apps = list(settings.INSTALLED_APPS) + ['view_tests.app0']
with self.settings(LANGUAGE_CODE='fr', INSTALLED_APPS=extended_apps):
with override('en-us'):
response = self.client.get('/views/jsi18n_english_translation/')
self.assertContains(response, javascript_quote('this app0 string is to be translated'))
def testI18NLanguageNonEnglishFallback(self):
"""
Makes sure that the fallback language is still working properly
in cases where the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE='fr'):
with override('none'):
response = self.client.get('/views/jsi18n/')
self.assertContains(response, 'Choisir une heure')
class JsI18NTestsMultiPackage(TestCase):
"""
Tests for django views in django/views/i18n.py that need to change
settings.LANGUAGE_CODE and merge JS translation from several packages.
"""
def testI18NLanguageEnglishDefault(self):
"""
Check if the JavaScript i18n view returns a complete language catalog
if the default language is en-us, the selected language has a
translation available and a catalog composed by djangojs domain
translations of multiple Python packages is requested. See #13388,
#3594 and #13514 for more details.
"""
extended_apps = list(settings.INSTALLED_APPS) + ['view_tests.app1', 'view_tests.app2']
with self.settings(LANGUAGE_CODE='en-us', INSTALLED_APPS=extended_apps):
with override('fr'):
response = self.client.get('/views/jsi18n_multi_packages1/')
self.assertContains(response, javascript_quote('il faut traduire cette chaîne de caractères de app1'))
def testI18NDifferentNonEnLangs(self):
"""
Similar to above but with neither default or requested language being
English.
"""
extended_apps = list(settings.INSTALLED_APPS) + ['view_tests.app3', 'view_tests.app4']
with self.settings(LANGUAGE_CODE='fr', INSTALLED_APPS=extended_apps):
with override('es-ar'):
response = self.client.get('/views/jsi18n_multi_packages2/')
self.assertContains(response, javascript_quote('este texto de app3 debe ser traducido'))
def testI18NWithLocalePaths(self):
extended_locale_paths = settings.LOCALE_PATHS + (
path.join(path.dirname(
path.dirname(path.abspath(upath(__file__)))), 'app3', 'locale'),)
with self.settings(LANGUAGE_CODE='es-ar', LOCALE_PATHS=extended_locale_paths):
with override('es-ar'):
response = self.client.get('/views/jsi18n/')
self.assertContains(response,
javascript_quote('este texto de app3 debe ser traducido'))
skip_selenium = not os.environ.get('DJANGO_SELENIUM_TESTS', False)
@unittest.skipIf(skip_selenium, 'Selenium tests not requested')
class JavascriptI18nTests(LiveServerTestCase):
available_apps = []
urls = 'view_tests.urls'
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
@classmethod
def setUpClass(cls):
try:
cls.selenium = import_by_path(cls.webdriver_class)()
except Exception as e:
raise unittest.SkipTest('Selenium webdriver "%s" not installed or '
'not operational: %s' % (cls.webdriver_class, str(e)))
super(JavascriptI18nTests, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.selenium.quit()
super(JavascriptI18nTests, cls).tearDownClass()
@override_settings(LANGUAGE_CODE='de')
def test_javascript_gettext(self):
extended_apps = list(settings.INSTALLED_APPS) + ['view_tests']
with self.settings(INSTALLED_APPS=extended_apps):
self.selenium.get('%s%s' % (self.live_server_url, '/jsi18n_template/'))
elem = self.selenium.find_element_by_id("gettext")
self.assertEqual(elem.text, "Entfernen")
elem = self.selenium.find_element_by_id("ngettext_sing")
self.assertEqual(elem.text, "1 Element")
elem = self.selenium.find_element_by_id("ngettext_plur")
self.assertEqual(elem.text, "455 Elemente")
elem = self.selenium.find_element_by_id("pgettext")
self.assertEqual(elem.text, "Kann")
elem = self.selenium.find_element_by_id("npgettext_sing")
self.assertEqual(elem.text, "1 Resultat")
elem = self.selenium.find_element_by_id("npgettext_plur")
self.assertEqual(elem.text, "455 Resultate")
def test_escaping(self):
extended_apps = list(settings.INSTALLED_APPS) + ['view_tests']
with self.settings(INSTALLED_APPS=extended_apps):
# Force a language via GET otherwise the gettext functions are a noop!
response = self.client.get('/jsi18n_admin/?language=de')
self.assertContains(response, '\\x04')
class JavascriptI18nChromeTests(JavascriptI18nTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class JavascriptI18nIETests(JavascriptI18nTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
| bsd-3-clause |
jeffsee55/ntb-sage | node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/MSVSSettings.py | 383 | 45223 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
r"""Code to validate and convert settings of the Microsoft build tools.
This file contains code to validate and convert settings of the Microsoft
build tools. The functions ConvertToMSBuildSettings(), ValidateMSVSSettings(),
and ValidateMSBuildSettings() are the entry points.
This file was created by comparing the projects created by Visual Studio 2008
and Visual Studio 2010 for all available settings through the user interface.
The MSBuild schemas were also considered. They are typically found in the
MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
"""
import sys
import re
# Dictionaries of settings validators. The key is the tool name, the value is
# a dictionary mapping setting names to validation functions.
_msvs_validators = {}
_msbuild_validators = {}
# A dictionary of settings converters. The key is the tool name, the value is
# a dictionary mapping setting names to conversion functions.
_msvs_to_msbuild_converters = {}
# Tool name mapping from MSVS to MSBuild.
_msbuild_name_of_tool = {}
class _Tool(object):
"""Represents a tool used by MSVS or MSBuild.
Attributes:
msvs_name: The name of the tool in MSVS.
msbuild_name: The name of the tool in MSBuild.
"""
def __init__(self, msvs_name, msbuild_name):
self.msvs_name = msvs_name
self.msbuild_name = msbuild_name
def _AddTool(tool):
"""Adds a tool to the four dictionaries used to process settings.
This only defines the tool. Each setting also needs to be added.
Args:
tool: The _Tool object to be added.
"""
_msvs_validators[tool.msvs_name] = {}
_msbuild_validators[tool.msbuild_name] = {}
_msvs_to_msbuild_converters[tool.msvs_name] = {}
_msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name
def _GetMSBuildToolSettings(msbuild_settings, tool):
"""Returns an MSBuild tool dictionary. Creates it if needed."""
return msbuild_settings.setdefault(tool.msbuild_name, {})
class _Type(object):
"""Type of settings (Base class)."""
def ValidateMSVS(self, value):
"""Verifies that the value is legal for MSVS.
Args:
value: the value to check for this type.
Raises:
ValueError if value is not valid for MSVS.
"""
def ValidateMSBuild(self, value):
"""Verifies that the value is legal for MSBuild.
Args:
value: the value to check for this type.
Raises:
ValueError if value is not valid for MSBuild.
"""
def ConvertToMSBuild(self, value):
"""Returns the MSBuild equivalent of the MSVS value given.
Args:
value: the MSVS value to convert.
Returns:
the MSBuild equivalent.
Raises:
ValueError if value is not valid.
"""
return value
class _String(_Type):
"""A setting that's just a string."""
def ValidateMSVS(self, value):
if not isinstance(value, basestring):
raise ValueError('expected string; got %r' % value)
def ValidateMSBuild(self, value):
if not isinstance(value, basestring):
raise ValueError('expected string; got %r' % value)
def ConvertToMSBuild(self, value):
# Convert the macros
return ConvertVCMacrosToMSBuild(value)
class _StringList(_Type):
"""A settings that's a list of strings."""
def ValidateMSVS(self, value):
if not isinstance(value, basestring) and not isinstance(value, list):
raise ValueError('expected string list; got %r' % value)
def ValidateMSBuild(self, value):
if not isinstance(value, basestring) and not isinstance(value, list):
raise ValueError('expected string list; got %r' % value)
def ConvertToMSBuild(self, value):
# Convert the macros
if isinstance(value, list):
return [ConvertVCMacrosToMSBuild(i) for i in value]
else:
return ConvertVCMacrosToMSBuild(value)
class _Boolean(_Type):
"""Boolean settings, can have the values 'false' or 'true'."""
def _Validate(self, value):
if value != 'true' and value != 'false':
raise ValueError('expected bool; got %r' % value)
def ValidateMSVS(self, value):
self._Validate(value)
def ValidateMSBuild(self, value):
self._Validate(value)
def ConvertToMSBuild(self, value):
self._Validate(value)
return value
class _Integer(_Type):
"""Integer settings."""
def __init__(self, msbuild_base=10):
_Type.__init__(self)
self._msbuild_base = msbuild_base
def ValidateMSVS(self, value):
# Try to convert, this will raise ValueError if invalid.
self.ConvertToMSBuild(value)
def ValidateMSBuild(self, value):
# Try to convert, this will raise ValueError if invalid.
int(value, self._msbuild_base)
def ConvertToMSBuild(self, value):
    msbuild_format = '%d' if self._msbuild_base == 10 else '0x%04x'
return msbuild_format % int(value)
class _Enumeration(_Type):
"""Type of settings that is an enumeration.
In MSVS, the values are indexes like '0', '1', and '2'.
MSBuild uses text labels that are more representative, like 'Win32'.
Constructor args:
label_list: an array of MSBuild labels that correspond to the MSVS index.
In the rare cases where MSVS has skipped an index value, None is
used in the array to indicate the unused spot.
new: an array of labels that are new to MSBuild.
"""
def __init__(self, label_list, new=None):
_Type.__init__(self)
self._label_list = label_list
self._msbuild_values = set(value for value in label_list
if value is not None)
if new is not None:
self._msbuild_values.update(new)
def ValidateMSVS(self, value):
# Try to convert. It will raise an exception if not valid.
self.ConvertToMSBuild(value)
def ValidateMSBuild(self, value):
if value not in self._msbuild_values:
raise ValueError('unrecognized enumerated value %s' % value)
def ConvertToMSBuild(self, value):
index = int(value)
if index < 0 or index >= len(self._label_list):
raise ValueError('index value (%d) not in expected range [0, %d)' %
(index, len(self._label_list)))
label = self._label_list[index]
if label is None:
raise ValueError('converted value for %s not specified.' % value)
return label
# Instantiate the various generic types.
_boolean = _Boolean()
_integer = _Integer()
# For now, we don't do any special validation on these types:
_string = _String()
_file_name = _String()
_folder_name = _String()
_file_list = _StringList()
_folder_list = _StringList()
_string_list = _StringList()
# Some boolean settings went from numerical values to boolean. The
# mapping is 0: default, 1: false, 2: true.
_newly_boolean = _Enumeration(['', 'false', 'true'])
def _Same(tool, name, setting_type):
"""Defines a setting that has the same name in MSVS and MSBuild.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
name: the name of the setting.
setting_type: the type of this setting.
"""
_Renamed(tool, name, name, setting_type)
def _Renamed(tool, msvs_name, msbuild_name, setting_type):
"""Defines a setting for which the name has changed.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
msvs_name: the name of the MSVS setting.
msbuild_name: the name of the MSBuild setting.
setting_type: the type of this setting.
"""
def _Translate(value, msbuild_settings):
msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value)
_msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS
_msbuild_validators[tool.msbuild_name][msbuild_name] = (
setting_type.ValidateMSBuild)
_msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
def _Moved(tool, settings_name, msbuild_tool_name, setting_type):
_MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name,
setting_type)
def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name,
msbuild_settings_name, setting_type):
"""Defines a setting that may have moved to a new section.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
msvs_settings_name: the MSVS name of the setting.
msbuild_tool_name: the name of the MSBuild tool to place the setting under.
msbuild_settings_name: the MSBuild name of the setting.
setting_type: the type of this setting.
"""
def _Translate(value, msbuild_settings):
tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value)
_msvs_validators[tool.msvs_name][msvs_settings_name] = (
setting_type.ValidateMSVS)
validator = setting_type.ValidateMSBuild
_msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
_msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate
def _MSVSOnly(tool, name, setting_type):
"""Defines a setting that is only found in MSVS.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
name: the name of the setting.
setting_type: the type of this setting.
"""
def _Translate(unused_value, unused_msbuild_settings):
# Since this is for MSVS only settings, no translation will happen.
pass
_msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS
_msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
def _MSBuildOnly(tool, name, setting_type):
"""Defines a setting that is only found in MSBuild.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
name: the name of the setting.
setting_type: the type of this setting.
"""
def _Translate(value, msbuild_settings):
# Let msbuild-only properties get translated as-is from msvs_settings.
tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
tool_settings[name] = value
_msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
_msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
def _ConvertedToAdditionalOption(tool, msvs_name, flag):
"""Defines a setting that's handled via a command line option in MSBuild.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
msvs_name: the name of the MSVS setting that if 'true' becomes a flag
flag: the flag to insert at the end of the AdditionalOptions
"""
def _Translate(value, msbuild_settings):
if value == 'true':
tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
if 'AdditionalOptions' in tool_settings:
new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag)
else:
new_flags = flag
tool_settings['AdditionalOptions'] = new_flags
_msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS
_msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
def _CustomGeneratePreprocessedFile(tool, msvs_name):
def _Translate(value, msbuild_settings):
tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
if value == '0':
tool_settings['PreprocessToFile'] = 'false'
tool_settings['PreprocessSuppressLineNumbers'] = 'false'
elif value == '1': # /P
tool_settings['PreprocessToFile'] = 'true'
tool_settings['PreprocessSuppressLineNumbers'] = 'false'
elif value == '2': # /EP /P
tool_settings['PreprocessToFile'] = 'true'
tool_settings['PreprocessSuppressLineNumbers'] = 'true'
else:
raise ValueError('value must be one of [0, 1, 2]; got %s' % value)
# Create a bogus validator that looks for '0', '1', or '2'
msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS
_msvs_validators[tool.msvs_name][msvs_name] = msvs_validator
msbuild_validator = _boolean.ValidateMSBuild
msbuild_tool_validators = _msbuild_validators[tool.msbuild_name]
msbuild_tool_validators['PreprocessToFile'] = msbuild_validator
msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator
_msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir')
fix_vc_macro_slashes_regex = re.compile(
r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
)
# Regular expression to detect keys that were generated by exclusion lists
_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
"""Verify that 'setting' is valid if it is generated from an exclusion list.
If the setting appears to be generated from an exclusion list, the root name
is checked.
Args:
setting: A string that is the setting name to validate
settings: A dictionary where the keys are valid settings
error_msg: The message to emit in the event of error
stderr: The stream receiving the error messages.
"""
# This may be unrecognized because it's an exclusion list. If the
# setting name has the _excluded suffix, then check the root name.
unrecognized = True
m = re.match(_EXCLUDED_SUFFIX_RE, setting)
if m:
root_setting = m.group(1)
unrecognized = root_setting not in settings
if unrecognized:
# We don't know this setting. Give a warning.
print >> stderr, error_msg
def FixVCMacroSlashes(s):
"""Replace macros which have excessive following slashes.
These macros are known to have a built-in trailing slash. Furthermore, many
scripts hiccup on processing paths with extra slashes in the middle.
This list is probably not exhaustive. Add as needed.
"""
if '$' in s:
s = fix_vc_macro_slashes_regex.sub(r'\1', s)
return s
def ConvertVCMacrosToMSBuild(s):
"""Convert the the MSVS macros found in the string to the MSBuild equivalent.
This list is probably not exhaustive. Add as needed.
"""
if '$' in s:
replace_map = {
'$(ConfigurationName)': '$(Configuration)',
'$(InputDir)': '%(RelativeDir)',
'$(InputExt)': '%(Extension)',
'$(InputFileName)': '%(Filename)%(Extension)',
'$(InputName)': '%(Filename)',
'$(InputPath)': '%(Identity)',
'$(ParentName)': '$(ProjectFileName)',
'$(PlatformName)': '$(Platform)',
'$(SafeInputName)': '%(Filename)',
}
for old, new in replace_map.iteritems():
s = s.replace(old, new)
s = FixVCMacroSlashes(s)
return s
def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
"""Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
Args:
msvs_settings: A dictionary. The key is the tool name. The values are
themselves dictionaries of settings and their values.
stderr: The stream receiving the error messages.
Returns:
A dictionary of MSBuild settings. The key is either the MSBuild tool name
or the empty string (for the global settings). The values are themselves
dictionaries of settings and their values.
"""
msbuild_settings = {}
for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems():
if msvs_tool_name in _msvs_to_msbuild_converters:
msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
for msvs_setting, msvs_value in msvs_tool_settings.iteritems():
if msvs_setting in msvs_tool:
# Invoke the translation function.
try:
msvs_tool[msvs_setting](msvs_value, msbuild_settings)
except ValueError, e:
print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
'%s' % (msvs_tool_name, msvs_setting, e))
else:
_ValidateExclusionSetting(msvs_setting,
msvs_tool,
('Warning: unrecognized setting %s/%s '
'while converting to MSBuild.' %
(msvs_tool_name, msvs_setting)),
stderr)
else:
print >> stderr, ('Warning: unrecognized tool %s while converting to '
'MSBuild.' % msvs_tool_name)
return msbuild_settings
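# A small usage sketch (hypothetical input, not from this codebase):
#
#   msvs = {'VCCLCompilerTool': {'Optimization': '2', 'WarnAsError': 'true'}}
#   ConvertToMSBuildSettings(msvs)
#   # -> {'ClCompile': {'Optimization': 'MaxSpeed',
#   #                   'TreatWarningAsError': 'true'}}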
def ValidateMSVSSettings(settings, stderr=sys.stderr):
"""Validates that the names of the settings are valid for MSVS.
Args:
settings: A dictionary. The key is the tool name. The values are
themselves dictionaries of settings and their values.
stderr: The stream receiving the error messages.
"""
_ValidateSettings(_msvs_validators, settings, stderr)
def ValidateMSBuildSettings(settings, stderr=sys.stderr):
"""Validates that the names of the settings are valid for MSBuild.
Args:
settings: A dictionary. The key is the tool name. The values are
themselves dictionaries of settings and their values.
stderr: The stream receiving the error messages.
"""
_ValidateSettings(_msbuild_validators, settings, stderr)
def _ValidateSettings(validators, settings, stderr):
"""Validates that the settings are valid for MSBuild or MSVS.
We currently only validate the names of the settings, not their values.
Args:
validators: A dictionary of tools and their validators.
settings: A dictionary. The key is the tool name. The values are
themselves dictionaries of settings and their values.
stderr: The stream receiving the error messages.
"""
for tool_name in settings:
if tool_name in validators:
tool_validators = validators[tool_name]
for setting, value in settings[tool_name].iteritems():
if setting in tool_validators:
try:
tool_validators[setting](value)
except ValueError, e:
print >> stderr, ('Warning: for %s/%s, %s' %
(tool_name, setting, e))
else:
_ValidateExclusionSetting(setting,
tool_validators,
('Warning: unrecognized setting %s/%s' %
(tool_name, setting)),
stderr)
else:
print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
# MSVS and MBuild names of the tools.
_compile = _Tool('VCCLCompilerTool', 'ClCompile')
_link = _Tool('VCLinkerTool', 'Link')
_midl = _Tool('VCMIDLTool', 'Midl')
_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
_lib = _Tool('VCLibrarianTool', 'Lib')
_manifest = _Tool('VCManifestTool', 'Manifest')
_masm = _Tool('MASM', 'MASM')
_AddTool(_compile)
_AddTool(_link)
_AddTool(_midl)
_AddTool(_rc)
_AddTool(_lib)
_AddTool(_manifest)
_AddTool(_masm)
# Add sections only found in the MSBuild settings.
_msbuild_validators[''] = {}
_msbuild_validators['ProjectReference'] = {}
_msbuild_validators['ManifestResourceCompile'] = {}
# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
# ClCompile in MSBuild.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
# the schema of the MSBuild ClCompile settings.
# Options that have the same name in MSVS and MSBuild
_Same(_compile, 'AdditionalIncludeDirectories', _folder_list) # /I
_Same(_compile, 'AdditionalOptions', _string_list)
_Same(_compile, 'AdditionalUsingDirectories', _folder_list) # /AI
_Same(_compile, 'AssemblerListingLocation', _file_name) # /Fa
_Same(_compile, 'BrowseInformationFile', _file_name)
_Same(_compile, 'BufferSecurityCheck', _boolean) # /GS
_Same(_compile, 'DisableLanguageExtensions', _boolean) # /Za
_Same(_compile, 'DisableSpecificWarnings', _string_list) # /wd
_Same(_compile, 'EnableFiberSafeOptimizations', _boolean) # /GT
_Same(_compile, 'EnablePREfast', _boolean) # /analyze Visible='false'
_Same(_compile, 'ExpandAttributedSource', _boolean) # /Fx
_Same(_compile, 'FloatingPointExceptions', _boolean) # /fp:except
_Same(_compile, 'ForceConformanceInForLoopScope', _boolean) # /Zc:forScope
_Same(_compile, 'ForcedIncludeFiles', _file_list) # /FI
_Same(_compile, 'ForcedUsingFiles', _file_list) # /FU
_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean) # /doc
_Same(_compile, 'IgnoreStandardIncludePath', _boolean) # /X
_Same(_compile, 'MinimalRebuild', _boolean) # /Gm
_Same(_compile, 'OmitDefaultLibName', _boolean) # /Zl
_Same(_compile, 'OmitFramePointers', _boolean) # /Oy
_Same(_compile, 'PreprocessorDefinitions', _string_list) # /D
_Same(_compile, 'ProgramDataBaseFileName', _file_name) # /Fd
_Same(_compile, 'RuntimeTypeInfo', _boolean) # /GR
_Same(_compile, 'ShowIncludes', _boolean) # /showIncludes
_Same(_compile, 'SmallerTypeCheck', _boolean) # /RTCc
_Same(_compile, 'StringPooling', _boolean) # /GF
_Same(_compile, 'SuppressStartupBanner', _boolean) # /nologo
_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean) # /Zc:wchar_t
_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean) # /u
_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U
_Same(_compile, 'UseFullPaths', _boolean) # /FC
_Same(_compile, 'WholeProgramOptimization', _boolean) # /GL
_Same(_compile, 'XMLDocumentationFileName', _file_name)
_Same(_compile, 'AssemblerOutput',
_Enumeration(['NoListing',
'AssemblyCode', # /FA
'All', # /FAcs
'AssemblyAndMachineCode', # /FAc
'AssemblyAndSourceCode'])) # /FAs
_Same(_compile, 'BasicRuntimeChecks',
_Enumeration(['Default',
'StackFrameRuntimeCheck', # /RTCs
'UninitializedLocalUsageCheck', # /RTCu
'EnableFastChecks'])) # /RTC1
_Same(_compile, 'BrowseInformation',
_Enumeration(['false',
'true', # /FR
'true'])) # /Fr
_Same(_compile, 'CallingConvention',
_Enumeration(['Cdecl', # /Gd
'FastCall', # /Gr
'StdCall', # /Gz
'VectorCall'])) # /Gv
_Same(_compile, 'CompileAs',
_Enumeration(['Default',
'CompileAsC', # /TC
'CompileAsCpp'])) # /TP
_Same(_compile, 'DebugInformationFormat',
_Enumeration(['', # Disabled
'OldStyle', # /Z7
None,
'ProgramDatabase', # /Zi
'EditAndContinue'])) # /ZI
_Same(_compile, 'EnableEnhancedInstructionSet',
_Enumeration(['NotSet',
'StreamingSIMDExtensions', # /arch:SSE
'StreamingSIMDExtensions2', # /arch:SSE2
'AdvancedVectorExtensions', # /arch:AVX (vs2012+)
'NoExtensions', # /arch:IA32 (vs2012+)
# This one only exists in the new msbuild format.
'AdvancedVectorExtensions2', # /arch:AVX2 (vs2013r2+)
]))
_Same(_compile, 'ErrorReporting',
_Enumeration(['None', # /errorReport:none
'Prompt', # /errorReport:prompt
'Queue'], # /errorReport:queue
new=['Send'])) # /errorReport:send"
_Same(_compile, 'ExceptionHandling',
_Enumeration(['false',
'Sync', # /EHsc
'Async'], # /EHa
new=['SyncCThrow'])) # /EHs
_Same(_compile, 'FavorSizeOrSpeed',
_Enumeration(['Neither',
'Speed', # /Ot
'Size'])) # /Os
_Same(_compile, 'FloatingPointModel',
_Enumeration(['Precise', # /fp:precise
'Strict', # /fp:strict
'Fast'])) # /fp:fast
_Same(_compile, 'InlineFunctionExpansion',
_Enumeration(['Default',
'OnlyExplicitInline', # /Ob1
'AnySuitable'], # /Ob2
new=['Disabled'])) # /Ob0
_Same(_compile, 'Optimization',
_Enumeration(['Disabled', # /Od
'MinSpace', # /O1
'MaxSpeed', # /O2
'Full'])) # /Ox
_Same(_compile, 'RuntimeLibrary',
_Enumeration(['MultiThreaded', # /MT
'MultiThreadedDebug', # /MTd
'MultiThreadedDLL', # /MD
'MultiThreadedDebugDLL'])) # /MDd
_Same(_compile, 'StructMemberAlignment',
_Enumeration(['Default',
'1Byte', # /Zp1
'2Bytes', # /Zp2
'4Bytes', # /Zp4
'8Bytes', # /Zp8
'16Bytes'])) # /Zp16
_Same(_compile, 'WarningLevel',
_Enumeration(['TurnOffAllWarnings', # /W0
'Level1', # /W1
'Level2', # /W2
'Level3', # /W3
'Level4'], # /W4
new=['EnableAllWarnings'])) # /Wall
# Options found in MSVS that have been renamed in MSBuild.
_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking',
_boolean) # /Gy
_Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions',
_boolean) # /Oi
_Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C
_Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo
_Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp
_Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile',
_file_name) # Used with /Yc and /Yu
_Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile',
_file_name) # /Fp
_Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader',
_Enumeration(['NotUsing', # VS recognized '' for this value too.
'Create', # /Yc
'Use'])) # /Yu
_Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX
_ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J')
# MSVS options not found in MSBuild.
_MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean)
_MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean)
# MSBuild options not found in MSVS.
_MSBuildOnly(_compile, 'BuildingInIDE', _boolean)
_MSBuildOnly(_compile, 'CompileAsManaged',
_Enumeration([], new=['false',
'true', # /clr
'Pure', # /clr:pure
'Safe', # /clr:safe
'OldSyntax'])) # /clr:oldSyntax
_MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch
_MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP
_MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi
_MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors
_MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we
_MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu
# Defines a setting that needs very customized processing
_CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile')
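# Illustrative sketch (added for clarity; not part of the original gyp
# source): once registered, these directives convert an MSVS compiler
# settings dict entry by entry. For example, the _Renamed directive above
# turns {'WarnAsError': 'true'} into {'TreatWarningAsError': 'true'}, and
# _ConvertedToAdditionalOption folds DefaultCharIsUnsigned into the
# MSBuild AdditionalOptions string as '/J'.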
# Directives for converting MSVS VCLinkerTool to MSBuild Link.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for
# the schema of the MSBuild Link settings.
# Options that have the same name in MSVS and MSBuild
_Same(_link, 'AdditionalDependencies', _file_list)
_Same(_link, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
# /MANIFESTDEPENDENCY:
_Same(_link, 'AdditionalManifestDependencies', _file_list)
_Same(_link, 'AdditionalOptions', _string_list)
_Same(_link, 'AddModuleNamesToAssembly', _file_list) # /ASSEMBLYMODULE
_Same(_link, 'AllowIsolation', _boolean) # /ALLOWISOLATION
_Same(_link, 'AssemblyLinkResource', _file_list) # /ASSEMBLYLINKRESOURCE
_Same(_link, 'BaseAddress', _string) # /BASE
_Same(_link, 'CLRUnmanagedCodeCheck', _boolean) # /CLRUNMANAGEDCODECHECK
_Same(_link, 'DelayLoadDLLs', _file_list) # /DELAYLOAD
_Same(_link, 'DelaySign', _boolean) # /DELAYSIGN
_Same(_link, 'EmbedManagedResourceFile', _file_list) # /ASSEMBLYRESOURCE
_Same(_link, 'EnableUAC', _boolean) # /MANIFESTUAC
_Same(_link, 'EntryPointSymbol', _string) # /ENTRY
_Same(_link, 'ForceSymbolReferences', _file_list) # /INCLUDE
_Same(_link, 'FunctionOrder', _file_name) # /ORDER
_Same(_link, 'GenerateDebugInformation', _boolean) # /DEBUG
_Same(_link, 'GenerateMapFile', _boolean) # /MAP
_Same(_link, 'HeapCommitSize', _string)
_Same(_link, 'HeapReserveSize', _string) # /HEAP
_Same(_link, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
_Same(_link, 'IgnoreEmbeddedIDL', _boolean) # /IGNOREIDL
_Same(_link, 'ImportLibrary', _file_name) # /IMPLIB
_Same(_link, 'KeyContainer', _file_name) # /KEYCONTAINER
_Same(_link, 'KeyFile', _file_name) # /KEYFILE
_Same(_link, 'ManifestFile', _file_name) # /ManifestFile
_Same(_link, 'MapExports', _boolean) # /MAPINFO:EXPORTS
_Same(_link, 'MapFileName', _file_name)
_Same(_link, 'MergedIDLBaseFileName', _file_name) # /IDLOUT
_Same(_link, 'MergeSections', _string) # /MERGE
_Same(_link, 'MidlCommandFile', _file_name) # /MIDL
_Same(_link, 'ModuleDefinitionFile', _file_name) # /DEF
_Same(_link, 'OutputFile', _file_name) # /OUT
_Same(_link, 'PerUserRedirection', _boolean)
_Same(_link, 'Profile', _boolean) # /PROFILE
_Same(_link, 'ProfileGuidedDatabase', _file_name) # /PGD
_Same(_link, 'ProgramDatabaseFile', _file_name) # /PDB
_Same(_link, 'RegisterOutput', _boolean)
_Same(_link, 'SetChecksum', _boolean) # /RELEASE
_Same(_link, 'StackCommitSize', _string)
_Same(_link, 'StackReserveSize', _string) # /STACK
_Same(_link, 'StripPrivateSymbols', _file_name) # /PDBSTRIPPED
_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean) # /DELAY:UNLOAD
_Same(_link, 'SuppressStartupBanner', _boolean) # /NOLOGO
_Same(_link, 'SwapRunFromCD', _boolean) # /SWAPRUN:CD
_Same(_link, 'TurnOffAssemblyGeneration', _boolean) # /NOASSEMBLY
_Same(_link, 'TypeLibraryFile', _file_name) # /TLBOUT
_Same(_link, 'TypeLibraryResourceID', _integer) # /TLBID
_Same(_link, 'UACUIAccess', _boolean) # /uiAccess='true'
_Same(_link, 'Version', _string) # /VERSION
_Same(_link, 'EnableCOMDATFolding', _newly_boolean) # /OPT:ICF
_Same(_link, 'FixedBaseAddress', _newly_boolean) # /FIXED
_Same(_link, 'LargeAddressAware', _newly_boolean) # /LARGEADDRESSAWARE
_Same(_link, 'OptimizeReferences', _newly_boolean) # /OPT:REF
_Same(_link, 'RandomizedBaseAddress', _newly_boolean) # /DYNAMICBASE
_Same(_link, 'TerminalServerAware', _newly_boolean) # /TSAWARE
_subsystem_enumeration = _Enumeration(
['NotSet',
'Console', # /SUBSYSTEM:CONSOLE
'Windows', # /SUBSYSTEM:WINDOWS
'Native', # /SUBSYSTEM:NATIVE
'EFI Application', # /SUBSYSTEM:EFI_APPLICATION
'EFI Boot Service Driver', # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER
'EFI ROM', # /SUBSYSTEM:EFI_ROM
'EFI Runtime', # /SUBSYSTEM:EFI_RUNTIME_DRIVER
'WindowsCE'], # /SUBSYSTEM:WINDOWSCE
new=['POSIX']) # /SUBSYSTEM:POSIX
_target_machine_enumeration = _Enumeration(
['NotSet',
'MachineX86', # /MACHINE:X86
None,
'MachineARM', # /MACHINE:ARM
'MachineEBC', # /MACHINE:EBC
'MachineIA64', # /MACHINE:IA64
None,
'MachineMIPS', # /MACHINE:MIPS
'MachineMIPS16', # /MACHINE:MIPS16
'MachineMIPSFPU', # /MACHINE:MIPSFPU
'MachineMIPSFPU16', # /MACHINE:MIPSFPU16
None,
None,
None,
'MachineSH4', # /MACHINE:SH4
None,
'MachineTHUMB', # /MACHINE:THUMB
'MachineX64']) # /MACHINE:X64
_Same(_link, 'AssemblyDebug',
_Enumeration(['',
'true', # /ASSEMBLYDEBUG
'false'])) # /ASSEMBLYDEBUG:DISABLE
_Same(_link, 'CLRImageType',
_Enumeration(['Default',
'ForceIJWImage', # /CLRIMAGETYPE:IJW
                    'ForcePureILImage', # /CLRIMAGETYPE:PURE
                    'ForceSafeILImage'])) # /CLRIMAGETYPE:SAFE
_Same(_link, 'CLRThreadAttribute',
_Enumeration(['DefaultThreadingAttribute', # /CLRTHREADATTRIBUTE:NONE
'MTAThreadingAttribute', # /CLRTHREADATTRIBUTE:MTA
'STAThreadingAttribute'])) # /CLRTHREADATTRIBUTE:STA
_Same(_link, 'DataExecutionPrevention',
_Enumeration(['',
'false', # /NXCOMPAT:NO
'true'])) # /NXCOMPAT
_Same(_link, 'Driver',
_Enumeration(['NotSet',
'Driver', # /Driver
'UpOnly', # /DRIVER:UPONLY
'WDM'])) # /DRIVER:WDM
_Same(_link, 'LinkTimeCodeGeneration',
_Enumeration(['Default',
'UseLinkTimeCodeGeneration', # /LTCG
'PGInstrument', # /LTCG:PGInstrument
'PGOptimization', # /LTCG:PGOptimize
'PGUpdate'])) # /LTCG:PGUpdate
_Same(_link, 'ShowProgress',
_Enumeration(['NotSet',
'LinkVerbose', # /VERBOSE
'LinkVerboseLib'], # /VERBOSE:Lib
new=['LinkVerboseICF', # /VERBOSE:ICF
'LinkVerboseREF', # /VERBOSE:REF
'LinkVerboseSAFESEH', # /VERBOSE:SAFESEH
'LinkVerboseCLR'])) # /VERBOSE:CLR
_Same(_link, 'SubSystem', _subsystem_enumeration)
_Same(_link, 'TargetMachine', _target_machine_enumeration)
_Same(_link, 'UACExecutionLevel',
_Enumeration(['AsInvoker', # /level='asInvoker'
'HighestAvailable', # /level='highestAvailable'
'RequireAdministrator'])) # /level='requireAdministrator'
_Same(_link, 'MinimumRequiredVersion', _string)
_Same(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX
# Options found in MSVS that have been renamed in MSBuild.
_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting',
_Enumeration(['NoErrorReport', # /ERRORREPORT:NONE
'PromptImmediately', # /ERRORREPORT:PROMPT
'QueueForNextLogin'], # /ERRORREPORT:QUEUE
new=['SendErrorReport'])) # /ERRORREPORT:SEND
_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries',
_file_list) # /NODEFAULTLIB
_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean) # /NOENTRY
_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean) # /SWAPRUN:NET
_Moved(_link, 'GenerateManifest', '', _boolean)
_Moved(_link, 'IgnoreImportLibrary', '', _boolean)
_Moved(_link, 'LinkIncremental', '', _newly_boolean)
_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
# MSVS options not found in MSBuild.
_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
# MSBuild options not found in MSVS.
_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean) # /SAFESEH
_MSBuildOnly(_link, 'LinkDLL', _boolean) # /DLL Visible='false'
_MSBuildOnly(_link, 'LinkStatus', _boolean) # /LTCG:STATUS
_MSBuildOnly(_link, 'PreventDllBinding', _boolean) # /ALLOWBIND
_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean) # /DELAY:NOBIND
_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name) # /STUB Visible='false'
_MSBuildOnly(_link, 'SectionAlignment', _integer) # /ALIGN
_MSBuildOnly(_link, 'SpecifySectionAttributes', _string) # /SECTION
_MSBuildOnly(_link, 'ForceFileOutput',
_Enumeration([], new=['Enabled', # /FORCE
# /FORCE:MULTIPLE
'MultiplyDefinedSymbolOnly',
'UndefinedSymbolOnly'])) # /FORCE:UNRESOLVED
_MSBuildOnly(_link, 'CreateHotPatchableImage',
_Enumeration([], new=['Enabled', # /FUNCTIONPADMIN
'X86Image', # /FUNCTIONPADMIN:5
'X64Image', # /FUNCTIONPADMIN:6
'ItaniumImage'])) # /FUNCTIONPADMIN:16
_MSBuildOnly(_link, 'CLRSupportLastError',
_Enumeration([], new=['Enabled', # /CLRSupportLastError
'Disabled', # /CLRSupportLastError:NO
# /CLRSupportLastError:SYSTEMDLL
'SystemDlls']))
# Directives for converting VCResourceCompilerTool to ResourceCompile.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for
# the schema of the MSBuild ResourceCompile settings.
_Same(_rc, 'AdditionalOptions', _string_list)
_Same(_rc, 'AdditionalIncludeDirectories', _folder_list) # /I
_Same(_rc, 'Culture', _Integer(msbuild_base=16))
_Same(_rc, 'IgnoreStandardIncludePath', _boolean) # /X
_Same(_rc, 'PreprocessorDefinitions', _string_list) # /D
_Same(_rc, 'ResourceOutputFileName', _string) # /fo
_Same(_rc, 'ShowProgress', _boolean) # /v
# There is no UI in VisualStudio 2008 to set the following properties.
# However they are found in CL and other tools. Include them here for
# completeness, as they are very likely to have the same usage pattern.
_Same(_rc, 'SuppressStartupBanner', _boolean) # /nologo
_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list) # /u
# MSBuild options not found in MSVS.
_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean) # /n
_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name)
# Directives for converting VCMIDLTool to Midl.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for
# the schema of the MSBuild Midl settings.
_Same(_midl, 'AdditionalIncludeDirectories', _folder_list) # /I
_Same(_midl, 'AdditionalOptions', _string_list)
_Same(_midl, 'CPreprocessOptions', _string) # /cpp_opt
_Same(_midl, 'ErrorCheckAllocations', _boolean) # /error allocation
_Same(_midl, 'ErrorCheckBounds', _boolean) # /error bounds_check
_Same(_midl, 'ErrorCheckEnumRange', _boolean) # /error enum
_Same(_midl, 'ErrorCheckRefPointers', _boolean) # /error ref
_Same(_midl, 'ErrorCheckStubData', _boolean) # /error stub_data
_Same(_midl, 'GenerateStublessProxies', _boolean) # /Oicf
_Same(_midl, 'GenerateTypeLibrary', _boolean)
_Same(_midl, 'HeaderFileName', _file_name) # /h
_Same(_midl, 'IgnoreStandardIncludePath', _boolean) # /no_def_idir
_Same(_midl, 'InterfaceIdentifierFileName', _file_name) # /iid
_Same(_midl, 'MkTypLibCompatible', _boolean) # /mktyplib203
_Same(_midl, 'OutputDirectory', _string) # /out
_Same(_midl, 'PreprocessorDefinitions', _string_list) # /D
_Same(_midl, 'ProxyFileName', _file_name) # /proxy
_Same(_midl, 'RedirectOutputAndErrors', _file_name) # /o
_Same(_midl, 'SuppressStartupBanner', _boolean) # /nologo
_Same(_midl, 'TypeLibraryName', _file_name) # /tlb
_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list) # /U
_Same(_midl, 'WarnAsError', _boolean) # /WX
_Same(_midl, 'DefaultCharType',
_Enumeration(['Unsigned', # /char unsigned
'Signed', # /char signed
'Ascii'])) # /char ascii7
_Same(_midl, 'TargetEnvironment',
_Enumeration(['NotSet',
'Win32', # /env win32
'Itanium', # /env ia64
'X64'])) # /env x64
_Same(_midl, 'EnableErrorChecks',
_Enumeration(['EnableCustom',
'None', # /error none
'All'])) # /error all
_Same(_midl, 'StructMemberAlignment',
_Enumeration(['NotSet',
'1', # Zp1
'2', # Zp2
'4', # Zp4
'8'])) # Zp8
_Same(_midl, 'WarningLevel',
_Enumeration(['0', # /W0
'1', # /W1
'2', # /W2
'3', # /W3
'4'])) # /W4
_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name) # /dlldata
_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters',
_boolean) # /robust
# MSBuild options not found in MSVS.
_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean) # /app_config
_MSBuildOnly(_midl, 'ClientStubFile', _file_name) # /cstub
_MSBuildOnly(_midl, 'GenerateClientFiles',
_Enumeration([], new=['Stub', # /client stub
'None'])) # /client none
_MSBuildOnly(_midl, 'GenerateServerFiles',
             _Enumeration([], new=['Stub', # /server stub
                                   'None'])) # /server none
_MSBuildOnly(_midl, 'LocaleID', _integer) # /lcid DECIMAL
_MSBuildOnly(_midl, 'ServerStubFile', _file_name) # /sstub
_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean) # /no_warn
_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_midl, 'TypeLibFormat',
_Enumeration([], new=['NewFormat', # /newtlb
'OldFormat'])) # /oldtlb
# Directives for converting VCLibrarianTool to Lib.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
# the schema of the MSBuild Lib settings.
_Same(_lib, 'AdditionalDependencies', _file_list)
_Same(_lib, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
_Same(_lib, 'AdditionalOptions', _string_list)
_Same(_lib, 'ExportNamedFunctions', _string_list) # /EXPORT
_Same(_lib, 'ForceSymbolReferences', _string) # /INCLUDE
_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB
_Same(_lib, 'ModuleDefinitionFile', _file_name) # /DEF
_Same(_lib, 'OutputFile', _file_name) # /OUT
_Same(_lib, 'SuppressStartupBanner', _boolean) # /NOLOGO
_Same(_lib, 'UseUnicodeResponseFiles', _boolean)
_Same(_lib, 'LinkTimeCodeGeneration', _boolean) # /LTCG
_Same(_lib, 'TargetMachine', _target_machine_enumeration)
# TODO(jeanluc) _link defines the same value that gets moved to
# ProjectReference. We may want to validate that they are consistent.
_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false'
_MSBuildOnly(_lib, 'ErrorReporting',
_Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT
'QueueForNextLogin', # /ERRORREPORT:QUEUE
'SendErrorReport', # /ERRORREPORT:SEND
'NoErrorReport'])) # /ERRORREPORT:NONE
_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
_MSBuildOnly(_lib, 'Name', _file_name) # /NAME
_MSBuildOnly(_lib, 'RemoveObjects', _file_list) # /REMOVE
_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean) # /WX
_MSBuildOnly(_lib, 'Verbose', _boolean)
# Directives for converting VCManifestTool to Mt.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
# the schema of the MSBuild Mt settings.
# Options that have the same name in MSVS and MSBuild
_Same(_manifest, 'AdditionalManifestFiles', _file_list) # /manifest
_Same(_manifest, 'AdditionalOptions', _string_list)
_Same(_manifest, 'AssemblyIdentity', _string) # /identity:
_Same(_manifest, 'ComponentFileName', _file_name) # /dll
_Same(_manifest, 'GenerateCatalogFiles', _boolean) # /makecdfs
_Same(_manifest, 'InputResourceManifests', _string) # /inputresource
_Same(_manifest, 'OutputManifestFile', _file_name) # /out
_Same(_manifest, 'RegistrarScriptFile', _file_name) # /rgs
_Same(_manifest, 'ReplacementsFile', _file_name) # /replacements
_Same(_manifest, 'SuppressStartupBanner', _boolean) # /nologo
_Same(_manifest, 'TypeLibraryFile', _file_name) # /tlb:
_Same(_manifest, 'UpdateFileHashes', _boolean) # /hashupdate
_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name)
_Same(_manifest, 'VerboseOutput', _boolean) # /verbose
# Options that have moved location.
_MovedAndRenamed(_manifest, 'ManifestResourceFile',
'ManifestResourceCompile',
'ResourceOutputFileName',
_file_name)
_Moved(_manifest, 'EmbedManifest', '', _boolean)
# MSVS options not found in MSBuild.
_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name)
_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean)
_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean)
# MSBuild options not found in MSVS.
_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean)
_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean) # /category
_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
_file_name) # /managedassemblyname
_MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource
_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency
_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
# Directives for MASM.
# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
# MSBuild MASM settings.
# Options that have the same name in MSVS and MSBuild.
_Same(_masm, 'UseSafeExceptionHandlers', _boolean) # /safeseh
| mit |
insomniacslk/dpkt | tests/test-perf.py | 24 | 5648 | #!/usr/bin/env python
import time, sys
import dnet
sys.path.insert(0, '.')
import dpkt
from impacket import ImpactDecoder, ImpactPacket
from openbsd import packet
import scapy
import xstruct
xip = xstruct.structdef('>', [
('v_hl', ('B', 1), (4 << 4) | (dnet.IP_HDR_LEN >> 2)),
('tos', ('B', 1), dnet.IP_TOS_DEFAULT),
('len', ('H', 1), dnet.IP_HDR_LEN),
('id', ('H', 1), 0),
('off', ('H', 1), 0),
('ttl', ('B', 1), dnet.IP_TTL_DEFAULT),
('p', ('B', 1), 0),
('sum', ('H', 1), 0),
('src', ('s', dnet.IP_ADDR_LEN), dnet.IP_ADDR_ANY),
('dst', ('s', dnet.IP_ADDR_LEN), dnet.IP_ADDR_ANY)
])
xudp = xstruct.structdef('>', [
    ('sport', ('H', 1), 0), # 16-bit source port
    ('dport', ('H', 1), 0), # 16-bit destination port
    ('ulen', ('H', 1), dnet.UDP_HDR_LEN),
    ('sum', ('H', 1), 0)
    ])
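# Sanity check (added for clarity; not in the original script): the two
# layouts above pack to dnet.IP_HDR_LEN (20) and dnet.UDP_HDR_LEN (8) bytes,
# matching the header slices taken in compare_parse() below.
# >>> len(str(xip())), len(str(xudp()))
# (20, 8)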
def compare_create(cnt):
"""
dpkt: 14915.2445937 pps
dpkt (manual): 15494.3632903 pps
impacket: 3929.30572776 pps
openbsd.packet: 1503.7928579 pps
scapy: 348.449269721 pps
xstruct: 88314.8953732 pps
"""
src = dnet.addr('1.2.3.4').ip
dst = dnet.addr('5.6.7.8').ip
data = 'hello world'
start = time.time()
for i in xrange(cnt):
dnet.ip_checksum(
str(dpkt.ip.IP(src=src, dst=dst, p=dnet.IP_PROTO_UDP,
len = dnet.IP_HDR_LEN + dnet.UDP_HDR_LEN + len(data),
data=dpkt.udp.UDP(sport=111, dport=222,
ulen=dnet.UDP_HDR_LEN + len(data),
data=data))))
print 'dpkt:', cnt / (time.time() - start), 'pps'
start = time.time()
for i in xrange(cnt):
dnet.ip_checksum(str(dpkt.ip.IP(src=src, dst=dst, p=dnet.IP_PROTO_UDP,
len=dnet.IP_HDR_LEN + dnet.UDP_HDR_LEN +
len(data))) +
str(dpkt.udp.UDP(sport=111, dport=222,
ulen=dnet.UDP_HDR_LEN + len(data))) +
data)
print 'dpkt (manual):', cnt / (time.time() - start), 'pps'
start = time.time()
for i in xrange(cnt):
ip = ImpactPacket.IP()
ip.set_ip_src('1.2.3.4')
ip.set_ip_dst('5.6.7.8')
udp = ImpactPacket.UDP()
udp.set_uh_sport(111)
udp.set_uh_dport(222)
udp.contains(ImpactPacket.Data(data))
ip.contains(udp)
ip.get_packet()
print 'impacket:', cnt / (time.time() - start), 'pps'
start = time.time()
for i in xrange(cnt):
p = packet.createPacket(packet.IP, packet.UDP)
p['ip'].src = '1.2.3.4'
p['ip'].dst = '5.6.7.8'
p['udp'].sport = 111
        p['udp'].dport = 222
p['udp'].payload = data
p.finalise()
p.getRaw()
print 'openbsd.packet:', cnt / (time.time() - start), 'pps'
start = time.time()
for i in xrange(cnt):
ip = scapy.IP(src='1.2.3.4', dst='5.6.7.8') / \
scapy.UDP(sport=111, dport=222) / data
ip.build()
print 'scapy:', cnt / (time.time() - start), 'pps'
start = time.time()
for i in xrange(cnt):
udp = xudp()
udp.sport = 111
udp.dport = 222
udp.ulen = dnet.UDP_HDR_LEN + len(data)
ip = xip()
ip.src = src
ip.dst = dst
ip.p = dnet.IP_PROTO_UDP
ip.len = dnet.IP_HDR_LEN + udp.ulen
dnet.ip_checksum(str(ip) + str(udp) + data)
print 'xstruct:', cnt / (time.time() - start), 'pps'
def compare_parse(cnt):
"""
dpkt: 23347.462887 pps
impacket: 9937.75963595 pps
openbsd.packet: 6826.5955563 pps
scapy: 1461.74727127 pps
xstruct: 206100.202449 pps
"""
s = 'E\x00\x00T\xc2\xf3\x00\x00\xff\x01\xe2\x18\n\x00\x01\x92\n\x00\x01\x0b\x08\x00\xfc\x11:g\x00\x00A,\xc66\x00\x0e\xcf\x12\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f!"#$%&\'()*+,-./01234567'
start = time.time()
for i in xrange(cnt):
dpkt.ip.IP(s)
print 'dpkt:', cnt / (time.time() - start), 'pps'
decoder = ImpactDecoder.IPDecoder()
start = time.time()
for i in xrange(cnt):
decoder.decode(s)
print 'impacket:', cnt / (time.time() - start), 'pps'
start = time.time()
for i in xrange(cnt):
packet.Packet(packet.IP, s)
print 'openbsd.packet:', cnt / (time.time() - start), 'pps'
start = time.time()
for i in xrange(cnt):
scapy.IP(s)
print 'scapy:', cnt / (time.time() - start), 'pps'
start = time.time()
for i in xrange(cnt):
ip = xip(s[:dnet.IP_HDR_LEN])
udp = xudp(s[dnet.IP_HDR_LEN:dnet.IP_HDR_LEN + dnet.UDP_HDR_LEN])
data = s[dnet.IP_HDR_LEN + dnet.UDP_HDR_LEN:]
print 'xstruct:', cnt / (time.time() - start), 'pps'
def compare_checksum(cnt):
s = 'A' * 80
start = time.time()
for i in range(cnt):
dpkt.in_cksum(s)
print 'dpkt.in_cksum:', cnt / (time.time() - start), 'pps'
start = time.time()
for i in range(cnt):
dnet.ip_cksum_carry(dnet.ip_cksum_add(s, 0))
print 'dnet.ip_cksum_add/carry:', cnt / (time.time() - start), 'pps'
def main():
import psyco
psyco.full()
ITER=10000
print 'checksum:'
compare_checksum(100000)
print 'create:'
compare_create(ITER)
print 'parse:'
compare_parse(ITER)
if __name__ == '__main__':
main()
"""
import hotshot, hotshot.stats
prof = hotshot.Profile('/var/tmp/dpkt.prof')
prof.runcall(main)
prof.close()
stats = hotshot.stats.load('/var/tmp/dpkt.prof')
stats.strip_dirs()
stats.sort_stats('time', 'calls')
stats.print_stats(20)
"""
| bsd-3-clause |
kailIII/geraldo | site/newsite/django_1_0/django/template/__init__.py | 11 | 35488 | """
This is the Django template system.
How it works:
The Lexer.tokenize() function converts a template string (i.e., a string containing
markup with custom template tags) to tokens, which can be either plain text
(TOKEN_TEXT), variables (TOKEN_VAR) or block statements (TOKEN_BLOCK).
The Parser() class takes a list of tokens in its constructor, and its parse()
method returns a compiled template -- which is, under the hood, a list of
Node objects.
Each Node is responsible for creating some sort of output -- e.g. simple text
(TextNode), variable values in a given context (VariableNode), results of basic
logic (IfNode), results of looping (ForNode), or anything else. The core Node
types are TextNode, VariableNode, IfNode and ForNode, but plugin modules can
define their own custom node types.
Each Node has a render() method, which takes a Context and returns a string of
the rendered node. For example, the render() method of a Variable Node returns
the variable's value as a string. The render() method of an IfNode returns the
rendered output of whatever was inside the loop, recursively.
The Template class is a convenient wrapper that takes care of template
compilation and rendering.
Usage:
The only thing you should ever use directly in this file is the Template class.
Create a compiled template object with a template_string, then call render()
with a context. In the compilation stage, the TemplateSyntaxError exception
will be raised if the template doesn't have proper syntax.
Sample code:
>>> from django import template
>>> s = u'<html>{% if test %}<h1>{{ varvalue }}</h1>{% endif %}</html>'
>>> t = template.Template(s)
(t is now a compiled template, and its render() method can be called multiple
times with multiple contexts)
>>> c = template.Context({'test':True, 'varvalue': 'Hello'})
>>> t.render(c)
u'<html><h1>Hello</h1></html>'
>>> c = template.Context({'test':False, 'varvalue': 'Hello'})
>>> t.render(c)
u'<html></html>'
"""
import re
from inspect import getargspec
from django.conf import settings
from django.template.context import Context, RequestContext, ContextPopException
from django.utils.itercompat import is_iterable
from django.utils.functional import curry, Promise
from django.utils.text import smart_split
from django.utils.encoding import smart_unicode, force_unicode
from django.utils.translation import ugettext as _
from django.utils.safestring import SafeData, EscapeData, mark_safe, mark_for_escaping
from django.utils.html import escape
__all__ = ('Template', 'Context', 'RequestContext', 'compile_string')
TOKEN_TEXT = 0
TOKEN_VAR = 1
TOKEN_BLOCK = 2
TOKEN_COMMENT = 3
# template syntax constants
FILTER_SEPARATOR = '|'
FILTER_ARGUMENT_SEPARATOR = ':'
VARIABLE_ATTRIBUTE_SEPARATOR = '.'
BLOCK_TAG_START = '{%'
BLOCK_TAG_END = '%}'
VARIABLE_TAG_START = '{{'
VARIABLE_TAG_END = '}}'
COMMENT_TAG_START = '{#'
COMMENT_TAG_END = '#}'
SINGLE_BRACE_START = '{'
SINGLE_BRACE_END = '}'
ALLOWED_VARIABLE_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.'
# what to report as the origin for templates that come from non-loader sources
# (e.g. strings)
UNKNOWN_SOURCE="<unknown source>"
# match a variable or block tag and capture the entire tag, including start/end delimiters
tag_re = re.compile('(%s.*?%s|%s.*?%s|%s.*?%s)' % (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END),
re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END),
re.escape(COMMENT_TAG_START), re.escape(COMMENT_TAG_END)))
# global dictionary of libraries that have been loaded using get_library
libraries = {}
# global list of libraries to load by default for a new parser
builtins = []
# True if TEMPLATE_STRING_IF_INVALID contains a format string (%s). None means
# uninitialised.
invalid_var_format_string = None
class TemplateSyntaxError(Exception):
def __str__(self):
try:
import cStringIO as StringIO
except ImportError:
import StringIO
output = StringIO.StringIO()
output.write(Exception.__str__(self))
# Check if we wrapped an exception and print that too.
if hasattr(self, 'exc_info'):
import traceback
output.write('\n\nOriginal ')
e = self.exc_info
traceback.print_exception(e[0], e[1], e[2], 500, output)
return output.getvalue()
class TemplateDoesNotExist(Exception):
pass
class TemplateEncodingError(Exception):
pass
class VariableDoesNotExist(Exception):
def __init__(self, msg, params=()):
self.msg = msg
self.params = params
def __str__(self):
return unicode(self).encode('utf-8')
def __unicode__(self):
return self.msg % tuple([force_unicode(p, errors='replace') for p in self.params])
class InvalidTemplateLibrary(Exception):
pass
class Origin(object):
def __init__(self, name):
self.name = name
def reload(self):
raise NotImplementedError
def __str__(self):
return self.name
class StringOrigin(Origin):
def __init__(self, source):
super(StringOrigin, self).__init__(UNKNOWN_SOURCE)
self.source = source
def reload(self):
return self.source
class Template(object):
def __init__(self, template_string, origin=None, name='<Unknown Template>'):
try:
template_string = smart_unicode(template_string)
except UnicodeDecodeError:
raise TemplateEncodingError("Templates can only be constructed from unicode or UTF-8 strings.")
if settings.TEMPLATE_DEBUG and origin is None:
origin = StringOrigin(template_string)
self.nodelist = compile_string(template_string, origin)
self.name = name
def __iter__(self):
for node in self.nodelist:
for subnode in node:
yield subnode
def render(self, context):
"Display stage -- can be called many times"
return self.nodelist.render(context)
def compile_string(template_string, origin):
"Compiles template_string into NodeList ready for rendering"
if settings.TEMPLATE_DEBUG:
from debug import DebugLexer, DebugParser
lexer_class, parser_class = DebugLexer, DebugParser
else:
lexer_class, parser_class = Lexer, Parser
lexer = lexer_class(template_string, origin)
parser = parser_class(lexer.tokenize())
return parser.parse()
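# Illustrative sketch (added for demonstration; not in the original module):
# tokenizing a small template shows how tag_re alternates between literal
# text and tag matches, yielding TOKEN_TEXT (0), TOKEN_VAR (1) and
# TOKEN_BLOCK (2) tokens.
# >>> lexer = Lexer(u'a{{ v }}b{% if v %}c{% endif %}', StringOrigin('demo'))
# >>> [t.token_type for t in lexer.tokenize()]
# [0, 1, 0, 2, 0, 2]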
class Token(object):
def __init__(self, token_type, contents):
# token_type must be TOKEN_TEXT, TOKEN_VAR, TOKEN_BLOCK or TOKEN_COMMENT.
self.token_type, self.contents = token_type, contents
def __str__(self):
return '<%s token: "%s...">' % \
({TOKEN_TEXT: 'Text', TOKEN_VAR: 'Var', TOKEN_BLOCK: 'Block', TOKEN_COMMENT: 'Comment'}[self.token_type],
self.contents[:20].replace('\n', ''))
def split_contents(self):
return list(smart_split(self.contents))
class Lexer(object):
def __init__(self, template_string, origin):
self.template_string = template_string
self.origin = origin
def tokenize(self):
"Return a list of tokens from a given template_string."
in_tag = False
result = []
for bit in tag_re.split(self.template_string):
if bit:
result.append(self.create_token(bit, in_tag))
in_tag = not in_tag
return result
def create_token(self, token_string, in_tag):
"""
Convert the given token string into a new Token object and return it.
If in_tag is True, we are processing something that matched a tag,
otherwise it should be treated as a literal string.
"""
if in_tag:
if token_string.startswith(VARIABLE_TAG_START):
token = Token(TOKEN_VAR, token_string[len(VARIABLE_TAG_START):-len(VARIABLE_TAG_END)].strip())
elif token_string.startswith(BLOCK_TAG_START):
token = Token(TOKEN_BLOCK, token_string[len(BLOCK_TAG_START):-len(BLOCK_TAG_END)].strip())
elif token_string.startswith(COMMENT_TAG_START):
token = Token(TOKEN_COMMENT, '')
else:
token = Token(TOKEN_TEXT, token_string)
return token
class Parser(object):
def __init__(self, tokens):
self.tokens = tokens
self.tags = {}
self.filters = {}
for lib in builtins:
self.add_library(lib)
def parse(self, parse_until=None):
if parse_until is None: parse_until = []
nodelist = self.create_nodelist()
while self.tokens:
token = self.next_token()
if token.token_type == TOKEN_TEXT:
self.extend_nodelist(nodelist, TextNode(token.contents), token)
elif token.token_type == TOKEN_VAR:
if not token.contents:
self.empty_variable(token)
filter_expression = self.compile_filter(token.contents)
var_node = self.create_variable_node(filter_expression)
self.extend_nodelist(nodelist, var_node,token)
elif token.token_type == TOKEN_BLOCK:
if token.contents in parse_until:
# put token back on token list so calling code knows why it terminated
self.prepend_token(token)
return nodelist
try:
command = token.contents.split()[0]
except IndexError:
self.empty_block_tag(token)
# execute callback function for this tag and append resulting node
self.enter_command(command, token)
try:
compile_func = self.tags[command]
except KeyError:
self.invalid_block_tag(token, command)
try:
compiled_result = compile_func(self, token)
except TemplateSyntaxError, e:
if not self.compile_function_error(token, e):
raise
self.extend_nodelist(nodelist, compiled_result, token)
self.exit_command()
if parse_until:
self.unclosed_block_tag(parse_until)
return nodelist
def skip_past(self, endtag):
while self.tokens:
token = self.next_token()
if token.token_type == TOKEN_BLOCK and token.contents == endtag:
return
self.unclosed_block_tag([endtag])
def create_variable_node(self, filter_expression):
return VariableNode(filter_expression)
def create_nodelist(self):
return NodeList()
def extend_nodelist(self, nodelist, node, token):
if node.must_be_first and nodelist:
try:
if nodelist.contains_nontext:
raise AttributeError
except AttributeError:
raise TemplateSyntaxError("%r must be the first tag in the template." % node)
if isinstance(nodelist, NodeList) and not isinstance(node, TextNode):
nodelist.contains_nontext = True
nodelist.append(node)
def enter_command(self, command, token):
pass
def exit_command(self):
pass
def error(self, token, msg):
return TemplateSyntaxError(msg)
def empty_variable(self, token):
raise self.error(token, "Empty variable tag")
def empty_block_tag(self, token):
raise self.error(token, "Empty block tag")
def invalid_block_tag(self, token, command):
raise self.error(token, "Invalid block tag: '%s'" % command)
def unclosed_block_tag(self, parse_until):
raise self.error(None, "Unclosed tags: %s " % ', '.join(parse_until))
def compile_function_error(self, token, e):
pass
def next_token(self):
return self.tokens.pop(0)
def prepend_token(self, token):
self.tokens.insert(0, token)
def delete_first_token(self):
del self.tokens[0]
def add_library(self, lib):
self.tags.update(lib.tags)
self.filters.update(lib.filters)
def compile_filter(self, token):
"Convenient wrapper for FilterExpression"
return FilterExpression(token, self)
def find_filter(self, filter_name):
if filter_name in self.filters:
return self.filters[filter_name]
else:
raise TemplateSyntaxError("Invalid filter: '%s'" % filter_name)
class TokenParser(object):
"""
Subclass this and implement the top() method to parse a template line. When
instantiating the parser, pass in the line from the Django template parser.
The parser's "tagname" instance-variable stores the name of the tag that
the filter was called with.
"""
def __init__(self, subject):
self.subject = subject
self.pointer = 0
self.backout = []
self.tagname = self.tag()
def top(self):
"Overload this method to do the actual parsing and return the result."
raise NotImplementedError()
def more(self):
"Returns True if there is more stuff in the tag."
return self.pointer < len(self.subject)
def back(self):
"Undoes the last microparser. Use this for lookahead and backtracking."
if not len(self.backout):
raise TemplateSyntaxError("back called without some previous parsing")
self.pointer = self.backout.pop()
def tag(self):
"A microparser that just returns the next tag from the line."
subject = self.subject
i = self.pointer
if i >= len(subject):
raise TemplateSyntaxError("expected another tag, found end of string: %s" % subject)
p = i
while i < len(subject) and subject[i] not in (' ', '\t'):
i += 1
s = subject[p:i]
while i < len(subject) and subject[i] in (' ', '\t'):
i += 1
self.backout.append(self.pointer)
self.pointer = i
return s
def value(self):
"A microparser that parses for a value: some string constant or variable name."
subject = self.subject
i = self.pointer
if i >= len(subject):
raise TemplateSyntaxError("Searching for value. Expected another value but found end of string: %s" % subject)
if subject[i] in ('"', "'"):
p = i
i += 1
while i < len(subject) and subject[i] != subject[p]:
i += 1
if i >= len(subject):
raise TemplateSyntaxError("Searching for value. Unexpected end of string in column %d: %s" % (i, subject))
i += 1
res = subject[p:i]
while i < len(subject) and subject[i] in (' ', '\t'):
i += 1
self.backout.append(self.pointer)
self.pointer = i
return res
else:
p = i
while i < len(subject) and subject[i] not in (' ', '\t'):
if subject[i] in ('"', "'"):
c = subject[i]
i += 1
while i < len(subject) and subject[i] != c:
i += 1
if i >= len(subject):
raise TemplateSyntaxError("Searching for value. Unexpected end of string in column %d: %s" % (i, subject))
i += 1
s = subject[p:i]
while i < len(subject) and subject[i] in (' ', '\t'):
i += 1
self.backout.append(self.pointer)
self.pointer = i
return s
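# Minimal illustrative subclass (an assumption for demonstration; no such
# subclass ships with this module): a parser for tag lines of the form
# mytag "some value", returning the quoted value via the value() microparser.
# class QuotedValueParser(TokenParser):
#     def top(self):
#         return self.value()
# >>> QuotedValueParser('mytag "some value"').top()
# '"some value"'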
filter_raw_string = r"""
^%(i18n_open)s"(?P<i18n_constant>%(str)s)"%(i18n_close)s|
^"(?P<constant>%(str)s)"|
^(?P<var>[%(var_chars)s]+)|
(?:%(filter_sep)s
(?P<filter_name>\w+)
(?:%(arg_sep)s
(?:
%(i18n_open)s"(?P<i18n_arg>%(str)s)"%(i18n_close)s|
"(?P<constant_arg>%(str)s)"|
(?P<var_arg>[%(var_chars)s]+)
)
)?
)""" % {
'str': r"""[^"\\]*(?:\\.[^"\\]*)*""",
'var_chars': "\w\." ,
'filter_sep': re.escape(FILTER_SEPARATOR),
'arg_sep': re.escape(FILTER_ARGUMENT_SEPARATOR),
'i18n_open' : re.escape("_("),
'i18n_close' : re.escape(")"),
}
filter_raw_string = filter_raw_string.replace("\n", "").replace(" ", "")
filter_re = re.compile(filter_raw_string, re.UNICODE)
class FilterExpression(object):
"""
Parses a variable token and its optional filters (all as a single string),
and return a list of tuples of the filter name and arguments.
Sample:
>>> token = 'variable|default:"Default value"|date:"Y-m-d"'
>>> p = Parser('')
>>> fe = FilterExpression(token, p)
>>> len(fe.filters)
2
>>> fe.var
<Variable: 'variable'>
This class should never be instantiated outside of the
get_filters_from_token helper function.
"""
def __init__(self, token, parser):
self.token = token
matches = filter_re.finditer(token)
var = None
filters = []
upto = 0
for match in matches:
start = match.start()
if upto != start:
raise TemplateSyntaxError("Could not parse some characters: %s|%s|%s" % \
(token[:upto], token[upto:start], token[start:]))
if var == None:
var, constant, i18n_constant = match.group("var", "constant", "i18n_constant")
if i18n_constant:
var = '"%s"' % _(i18n_constant.replace(r'\"', '"'))
elif constant:
var = '"%s"' % constant.replace(r'\"', '"')
upto = match.end()
if var == None:
raise TemplateSyntaxError("Could not find variable at start of %s" % token)
elif var.find(VARIABLE_ATTRIBUTE_SEPARATOR + '_') > -1 or var[0] == '_':
raise TemplateSyntaxError("Variables and attributes may not begin with underscores: '%s'" % var)
else:
filter_name = match.group("filter_name")
args = []
constant_arg, i18n_arg, var_arg = match.group("constant_arg", "i18n_arg", "var_arg")
if i18n_arg:
args.append((False, _(i18n_arg.replace(r'\"', '"'))))
elif constant_arg is not None:
args.append((False, constant_arg.replace(r'\"', '"')))
elif var_arg:
args.append((True, Variable(var_arg)))
filter_func = parser.find_filter(filter_name)
self.args_check(filter_name,filter_func, args)
filters.append( (filter_func,args))
upto = match.end()
if upto != len(token):
raise TemplateSyntaxError("Could not parse the remainder: '%s' from '%s'" % (token[upto:], token))
self.filters = filters
self.var = Variable(var)
def resolve(self, context, ignore_failures=False):
try:
obj = self.var.resolve(context)
except VariableDoesNotExist:
if ignore_failures:
obj = None
else:
if settings.TEMPLATE_STRING_IF_INVALID:
global invalid_var_format_string
if invalid_var_format_string is None:
invalid_var_format_string = '%s' in settings.TEMPLATE_STRING_IF_INVALID
if invalid_var_format_string:
return settings.TEMPLATE_STRING_IF_INVALID % self.var
return settings.TEMPLATE_STRING_IF_INVALID
else:
obj = settings.TEMPLATE_STRING_IF_INVALID
for func, args in self.filters:
arg_vals = []
for lookup, arg in args:
if not lookup:
arg_vals.append(mark_safe(arg))
else:
arg_vals.append(arg.resolve(context))
if getattr(func, 'needs_autoescape', False):
new_obj = func(obj, autoescape=context.autoescape, *arg_vals)
else:
new_obj = func(obj, *arg_vals)
if getattr(func, 'is_safe', False) and isinstance(obj, SafeData):
obj = mark_safe(new_obj)
elif isinstance(obj, EscapeData):
obj = mark_for_escaping(new_obj)
else:
obj = new_obj
return obj
def args_check(name, func, provided):
provided = list(provided)
plen = len(provided)
# Check to see if a decorator is providing the real function.
func = getattr(func, '_decorated_function', func)
args, varargs, varkw, defaults = getargspec(func)
# First argument is filter input.
args.pop(0)
if defaults:
nondefs = args[:-len(defaults)]
else:
nondefs = args
# Args without defaults must be provided.
try:
for arg in nondefs:
provided.pop(0)
except IndexError:
# Not enough
raise TemplateSyntaxError("%s requires %d arguments, %d provided" % (name, len(nondefs), plen))
# Defaults can be overridden.
defaults = defaults and list(defaults) or []
try:
for parg in provided:
defaults.pop(0)
except IndexError:
# Too many.
raise TemplateSyntaxError("%s requires %d arguments, %d provided" % (name, len(nondefs), plen))
return True
args_check = staticmethod(args_check)
def __str__(self):
return self.token
def resolve_variable(path, context):
"""
Returns the resolved variable, which may contain attribute syntax, within
the given context.
Deprecated; use the Variable class instead.
"""
return Variable(path).resolve(context)
class Variable(object):
"""
A template variable, resolvable against a given context. The variable may be
a hard-coded string (if it begins and ends with single or double quote
marks)::
>>> c = {'article': {'section':u'News'}}
>>> Variable('article.section').resolve(c)
u'News'
>>> Variable('article').resolve(c)
{'section': u'News'}
>>> class AClass: pass
>>> c = AClass()
>>> c.article = AClass()
>>> c.article.section = u'News'
>>> Variable('article.section').resolve(c)
u'News'
(The example assumes VARIABLE_ATTRIBUTE_SEPARATOR is '.')
"""
def __init__(self, var):
self.var = var
self.literal = None
self.lookups = None
self.translate = False
try:
# First try to treat this variable as a number.
#
# Note that this could cause an OverflowError here that we're not
# catching. Since this should only happen at compile time, that's
# probably OK.
self.literal = float(var)
# So it's a float... is it an int? If the original value contained a
# dot or an "e" then it was a float, not an int.
if '.' not in var and 'e' not in var.lower():
self.literal = int(self.literal)
# "2." is invalid
if var.endswith('.'):
raise ValueError
except ValueError:
# A ValueError means that the variable isn't a number.
if var.startswith('_(') and var.endswith(')'):
# The result of the lookup should be translated at rendering
# time.
self.translate = True
var = var[2:-1]
# If it's wrapped with quotes (single or double), then
# we're also dealing with a literal.
if var[0] in "\"'" and var[0] == var[-1]:
self.literal = mark_safe(var[1:-1])
else:
# Otherwise we'll set self.lookups so that resolve() knows we're
# dealing with a bonafide variable
self.lookups = tuple(var.split(VARIABLE_ATTRIBUTE_SEPARATOR))
def resolve(self, context):
"""Resolve this variable against a given context."""
if self.lookups is not None:
# We're dealing with a variable that needs to be resolved
value = self._resolve_lookup(context)
else:
# We're dealing with a literal, so it's already been "resolved"
value = self.literal
if self.translate:
return _(value)
return value
def __repr__(self):
return "<%s: %r>" % (self.__class__.__name__, self.var)
def __str__(self):
return self.var
def _resolve_lookup(self, context):
"""
Performs resolution of a real variable (i.e. not a literal) against the
given context.
As indicated by the method's name, this method is an implementation
detail and shouldn't be called by external code. Use Variable.resolve()
instead.
"""
current = context
for bit in self.lookups:
try: # dictionary lookup
current = current[bit]
except (TypeError, AttributeError, KeyError):
try: # attribute lookup
current = getattr(current, bit)
if callable(current):
if getattr(current, 'alters_data', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
try: # method call (assuming no args required)
current = current()
except TypeError: # arguments *were* required
# GOTCHA: This will also catch any TypeError
# raised in the function itself.
current = settings.TEMPLATE_STRING_IF_INVALID # invalid method call
except Exception, e:
if getattr(e, 'silent_variable_failure', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
raise
except (TypeError, AttributeError):
try: # list-index lookup
current = current[int(bit)]
except (IndexError, # list index out of range
ValueError, # invalid literal for int()
KeyError, # current is a dict without `int(bit)` key
TypeError, # unsubscriptable object
):
raise VariableDoesNotExist("Failed lookup for key [%s] in %r", (bit, current)) # missing attribute
except Exception, e:
if getattr(e, 'silent_variable_failure', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
raise
return current
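# Illustrative lookup order (added for clarity; not in the original source):
# each dotted bit tries dictionary access, then attribute/method access,
# then integer indexing, so list elements resolve through their index:
# >>> Variable('items.0').resolve({'items': ['first', 'second']})
# 'first'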
class Node(object):
    # Set this to True for nodes that must be first in the template (although
    # they can be preceded by text nodes).
must_be_first = False
def render(self, context):
"Return the node rendered as a string"
pass
def __iter__(self):
yield self
def get_nodes_by_type(self, nodetype):
"Return a list of all nodes (within this node and its nodelist) of the given type"
nodes = []
if isinstance(self, nodetype):
nodes.append(self)
if hasattr(self, 'nodelist'):
nodes.extend(self.nodelist.get_nodes_by_type(nodetype))
return nodes
class NodeList(list):
# Set to True the first time a non-TextNode is inserted by
# extend_nodelist().
contains_nontext = False
def render(self, context):
bits = []
for node in self:
if isinstance(node, Node):
bits.append(self.render_node(node, context))
else:
bits.append(node)
return mark_safe(''.join([force_unicode(b) for b in bits]))
def get_nodes_by_type(self, nodetype):
"Return a list of all nodes of the given type"
nodes = []
for node in self:
nodes.extend(node.get_nodes_by_type(nodetype))
return nodes
def render_node(self, node, context):
return node.render(context)
class TextNode(Node):
def __init__(self, s):
self.s = s
def __repr__(self):
return "<Text Node: '%s'>" % self.s[:25]
def render(self, context):
return self.s
class VariableNode(Node):
def __init__(self, filter_expression):
self.filter_expression = filter_expression
def __repr__(self):
return "<Variable Node: %s>" % self.filter_expression
def render(self, context):
try:
output = force_unicode(self.filter_expression.resolve(context))
except UnicodeDecodeError:
# Unicode conversion can fail sometimes for reasons out of our
# control (e.g. exception rendering). In that case, we fail quietly.
return ''
if (context.autoescape and not isinstance(output, SafeData)) or isinstance(output, EscapeData):
return force_unicode(escape(output))
else:
return force_unicode(output)
def generic_tag_compiler(params, defaults, name, node_class, parser, token):
"Returns a template.Node subclass."
bits = token.split_contents()[1:]
bmax = len(params)
def_len = defaults and len(defaults) or 0
bmin = bmax - def_len
if(len(bits) < bmin or len(bits) > bmax):
if bmin == bmax:
message = "%s takes %s arguments" % (name, bmin)
else:
message = "%s takes between %s and %s arguments" % (name, bmin, bmax)
raise TemplateSyntaxError(message)
return node_class(bits)
class Library(object):
def __init__(self):
self.filters = {}
self.tags = {}
def tag(self, name=None, compile_function=None):
if name == None and compile_function == None:
# @register.tag()
return self.tag_function
elif name != None and compile_function == None:
if(callable(name)):
# @register.tag
return self.tag_function(name)
else:
# @register.tag('somename') or @register.tag(name='somename')
def dec(func):
return self.tag(name, func)
return dec
elif name != None and compile_function != None:
# register.tag('somename', somefunc)
self.tags[name] = compile_function
return compile_function
else:
            raise InvalidTemplateLibrary("Unsupported arguments to Library.tag: (%r, %r)" % (name, compile_function))
def tag_function(self,func):
self.tags[getattr(func, "_decorated_function", func).__name__] = func
return func
def filter(self, name=None, filter_func=None):
if name == None and filter_func == None:
# @register.filter()
return self.filter_function
elif filter_func == None:
if(callable(name)):
# @register.filter
return self.filter_function(name)
else:
# @register.filter('somename') or @register.filter(name='somename')
def dec(func):
return self.filter(name, func)
return dec
elif name != None and filter_func != None:
# register.filter('somename', somefunc)
self.filters[name] = filter_func
return filter_func
else:
            raise InvalidTemplateLibrary("Unsupported arguments to Library.filter: (%r, %r)" % (name, filter_func))
def filter_function(self, func):
self.filters[getattr(func, "_decorated_function", func).__name__] = func
return func
def simple_tag(self,func):
params, xx, xxx, defaults = getargspec(func)
class SimpleNode(Node):
def __init__(self, vars_to_resolve):
self.vars_to_resolve = map(Variable, vars_to_resolve)
def render(self, context):
resolved_vars = [var.resolve(context) for var in self.vars_to_resolve]
return func(*resolved_vars)
compile_func = curry(generic_tag_compiler, params, defaults, getattr(func, "_decorated_function", func).__name__, SimpleNode)
compile_func.__doc__ = func.__doc__
self.tag(getattr(func, "_decorated_function", func).__name__, compile_func)
return func
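# Illustrative usage sketch (an assumption; not in the original source):
# register = Library()
# @register.simple_tag
# def greet(name):
#     return u'Hello, %s' % name
# After the library is loaded, {% greet user.first_name %} renders the
# returned string with the argument resolved against the current context.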
def inclusion_tag(self, file_name, context_class=Context, takes_context=False):
def dec(func):
params, xx, xxx, defaults = getargspec(func)
if takes_context:
if params[0] == 'context':
params = params[1:]
else:
raise TemplateSyntaxError("Any tag function decorated with takes_context=True must have a first argument of 'context'")
class InclusionNode(Node):
def __init__(self, vars_to_resolve):
self.vars_to_resolve = map(Variable, vars_to_resolve)
def render(self, context):
resolved_vars = [var.resolve(context) for var in self.vars_to_resolve]
if takes_context:
args = [context] + resolved_vars
else:
args = resolved_vars
dict = func(*args)
if not getattr(self, 'nodelist', False):
from django.template.loader import get_template, select_template
if not isinstance(file_name, basestring) and is_iterable(file_name):
t = select_template(file_name)
else:
t = get_template(file_name)
self.nodelist = t.nodelist
return self.nodelist.render(context_class(dict,
autoescape=context.autoescape))
compile_func = curry(generic_tag_compiler, params, defaults, getattr(func, "_decorated_function", func).__name__, InclusionNode)
compile_func.__doc__ = func.__doc__
self.tag(getattr(func, "_decorated_function", func).__name__, compile_func)
return func
return dec
def get_library(module_name):
lib = libraries.get(module_name, None)
if not lib:
try:
mod = __import__(module_name, {}, {}, [''])
except ImportError, e:
raise InvalidTemplateLibrary("Could not load template library from %s, %s" % (module_name, e))
try:
lib = mod.register
libraries[module_name] = lib
except AttributeError:
raise InvalidTemplateLibrary("Template library %s does not have a variable named 'register'" % module_name)
return lib
def add_to_builtins(module_name):
builtins.append(get_library(module_name))
add_to_builtins('django.template.defaulttags')
add_to_builtins('django.template.defaultfilters')
| lgpl-3.0 |
chris-chris/tensorflow | tensorflow/contrib/framework/python/ops/ops.py | 30 | 2598 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classes and functions used to construct graphs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
__all__ = ['get_graph_from_inputs',
'get_name_scope']
def get_graph_from_inputs(op_input_list, graph=None):
"""Returns the appropriate graph to use for the given inputs.
1. If `graph` is provided, we validate that all inputs in `op_input_list` are
from the same graph.
2. Otherwise, we attempt to select a graph from the first Operation- or
Tensor-valued input in `op_input_list`, and validate that all other
such inputs are in the same graph.
3. If the graph was not specified and it could not be inferred from
`op_input_list`, we attempt to use the default graph.
Args:
op_input_list: A list of inputs to an operation, which may include `Tensor`,
`Operation`, and other objects that may be converted to a graph element.
graph: (Optional) The explicit graph to use.
Raises:
TypeError: If `op_input_list` is not a list or tuple, or if graph is not a
Graph.
ValueError: If a graph is explicitly passed and not all inputs are from it,
or if the inputs are from multiple graphs, or we could not find a graph
and there was no default graph.
Returns:
The appropriate graph to use for the given inputs.
"""
# pylint: disable=protected-access
return ops._get_graph_from_inputs(op_input_list, graph)
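# Illustrative usage sketch (an assumption, not from the original file;
# assumes the usual `tf` alias for the top-level tensorflow module):
# >>> a = tf.constant(1.0)
# >>> b = a * 2.0
# >>> get_graph_from_inputs([a, b]) is a.graph
# True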
def get_name_scope():
"""Returns the current name scope of the default graph.
For example:
```python
with tf.name_scope('scope1'):
with tf.name_scope('scope2'):
print(tf.contrib.framework.get_name_scope())
```
would print the string `scope1/scope2`.
Returns:
A string represnting the current name scope.
"""
return ops.get_default_graph().get_name_scope()
| apache-2.0 |
liangazhou/django-rdp | packages/Django-1.8.6/build/lib/django/forms/formsets.py | 100 | 17573 | from __future__ import unicode_literals
from django.core.exceptions import ValidationError
from django.forms import Form
from django.forms.fields import BooleanField, IntegerField
from django.forms.utils import ErrorList
from django.forms.widgets import HiddenInput
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.html import html_safe
from django.utils.safestring import mark_safe
from django.utils.six.moves import range
from django.utils.translation import ugettext as _, ungettext
__all__ = ('BaseFormSet', 'formset_factory', 'all_valid')
# special field names
TOTAL_FORM_COUNT = 'TOTAL_FORMS'
INITIAL_FORM_COUNT = 'INITIAL_FORMS'
MIN_NUM_FORM_COUNT = 'MIN_NUM_FORMS'
MAX_NUM_FORM_COUNT = 'MAX_NUM_FORMS'
ORDERING_FIELD_NAME = 'ORDER'
DELETION_FIELD_NAME = 'DELETE'
# default minimum number of forms in a formset
DEFAULT_MIN_NUM = 0
# default maximum number of forms in a formset, to prevent memory exhaustion
DEFAULT_MAX_NUM = 1000
class ManagementForm(Form):
"""
``ManagementForm`` is used to keep track of how many form instances
are displayed on the page. If adding new forms via javascript, you should
increment the count field of this form as well.
"""
def __init__(self, *args, **kwargs):
self.base_fields[TOTAL_FORM_COUNT] = IntegerField(widget=HiddenInput)
self.base_fields[INITIAL_FORM_COUNT] = IntegerField(widget=HiddenInput)
# MIN_NUM_FORM_COUNT and MAX_NUM_FORM_COUNT are output with the rest of
# the management form, but only for the convenience of client-side
# code. The POST value of them returned from the client is not checked.
self.base_fields[MIN_NUM_FORM_COUNT] = IntegerField(required=False, widget=HiddenInput)
self.base_fields[MAX_NUM_FORM_COUNT] = IntegerField(required=False, widget=HiddenInput)
super(ManagementForm, self).__init__(*args, **kwargs)
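# Illustrative sketch (added for clarity; not in the original source): for a
# formset rendered with prefix 'form', the client posts these hidden fields
# back, and any JavaScript that adds forms must bump form-TOTAL_FORMS:
# form-TOTAL_FORMS=2, form-INITIAL_FORMS=1,
# form-MIN_NUM_FORMS=0, form-MAX_NUM_FORMS=1000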
@html_safe
@python_2_unicode_compatible
class BaseFormSet(object):
"""
A collection of instances of the same Form class.
"""
def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
initial=None, error_class=ErrorList):
self.is_bound = data is not None or files is not None
self.prefix = prefix or self.get_default_prefix()
self.auto_id = auto_id
self.data = data or {}
self.files = files or {}
self.initial = initial
self.error_class = error_class
self._errors = None
self._non_form_errors = None
def __str__(self):
return self.as_table()
def __iter__(self):
"""Yields the forms in the order they should be rendered"""
return iter(self.forms)
def __getitem__(self, index):
"""Returns the form at the given index, based on the rendering order"""
return self.forms[index]
def __len__(self):
return len(self.forms)
def __bool__(self):
"""All formsets have a management form which is not included in the length"""
return True
def __nonzero__(self): # Python 2 compatibility
return type(self).__bool__(self)
@property
def management_form(self):
"""Returns the ManagementForm instance for this FormSet."""
if self.is_bound:
form = ManagementForm(self.data, auto_id=self.auto_id, prefix=self.prefix)
if not form.is_valid():
raise ValidationError(
_('ManagementForm data is missing or has been tampered with'),
code='missing_management_form',
)
else:
form = ManagementForm(auto_id=self.auto_id, prefix=self.prefix, initial={
TOTAL_FORM_COUNT: self.total_form_count(),
INITIAL_FORM_COUNT: self.initial_form_count(),
MIN_NUM_FORM_COUNT: self.min_num,
MAX_NUM_FORM_COUNT: self.max_num
})
return form
def total_form_count(self):
"""Returns the total number of forms in this FormSet."""
if self.is_bound:
# return absolute_max if it is lower than the actual total form
# count in the data; this is DoS protection to prevent clients
# from forcing the server to instantiate arbitrary numbers of
# forms
return min(self.management_form.cleaned_data[TOTAL_FORM_COUNT], self.absolute_max)
else:
initial_forms = self.initial_form_count()
total_forms = max(initial_forms, self.min_num) + self.extra
# Allow all existing related objects/inlines to be displayed,
# but don't allow extra beyond max_num.
if initial_forms > self.max_num >= 0:
total_forms = initial_forms
elif total_forms > self.max_num >= 0:
total_forms = self.max_num
return total_forms
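    # Worked example (illustrative): with initial_form_count() == 3,
    # min_num == 0 and extra == 2, an unbound formset displays
    # max(3, 0) + 2 == 5 forms; with max_num == 4 that is clamped to 4.
    # If there were already 6 initial forms (> max_num), all 6 would still
    # be displayed, but no extras beyond them.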
def initial_form_count(self):
"""Returns the number of forms that are required in this FormSet."""
if self.is_bound:
return self.management_form.cleaned_data[INITIAL_FORM_COUNT]
else:
# Use the length of the initial data if it's there, 0 otherwise.
initial_forms = len(self.initial) if self.initial else 0
return initial_forms
@cached_property
def forms(self):
"""
Instantiate forms at first property access.
"""
# DoS protection is included in total_form_count()
forms = [self._construct_form(i) for i in range(self.total_form_count())]
return forms
def _construct_form(self, i, **kwargs):
"""
Instantiates and returns the i-th form instance in a formset.
"""
defaults = {
'auto_id': self.auto_id,
'prefix': self.add_prefix(i),
'error_class': self.error_class,
}
if self.is_bound:
defaults['data'] = self.data
defaults['files'] = self.files
if self.initial and 'initial' not in kwargs:
try:
defaults['initial'] = self.initial[i]
except IndexError:
pass
# Allow extra forms to be empty, unless they're part of
# the minimum forms.
if i >= self.initial_form_count() and i >= self.min_num:
defaults['empty_permitted'] = True
defaults.update(kwargs)
form = self.form(**defaults)
self.add_fields(form, i)
return form
@property
def initial_forms(self):
"""Return a list of all the initial forms in this formset."""
return self.forms[:self.initial_form_count()]
@property
def extra_forms(self):
"""Return a list of all the extra forms in this formset."""
return self.forms[self.initial_form_count():]
@property
def empty_form(self):
form = self.form(
auto_id=self.auto_id,
prefix=self.add_prefix('__prefix__'),
empty_permitted=True,
)
self.add_fields(form, None)
return form
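    # Usage sketch (illustrative): templates commonly render
    # ``formset.empty_form`` once as a hidden template; client-side code
    # clones it and replaces the ``__prefix__`` placeholder in field
    # names/ids with the next index before incrementing TOTAL_FORMS.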
@property
def cleaned_data(self):
"""
Returns a list of form.cleaned_data dicts for every form in self.forms.
"""
if not self.is_valid():
raise AttributeError("'%s' object has no attribute 'cleaned_data'" % self.__class__.__name__)
return [form.cleaned_data for form in self.forms]
@property
def deleted_forms(self):
"""
Returns a list of forms that have been marked for deletion.
"""
if not self.is_valid() or not self.can_delete:
return []
# construct _deleted_form_indexes which is just a list of form indexes
# that have had their deletion widget set to True
if not hasattr(self, '_deleted_form_indexes'):
self._deleted_form_indexes = []
for i in range(0, self.total_form_count()):
form = self.forms[i]
# if this is an extra form and hasn't changed, don't consider it
if i >= self.initial_form_count() and not form.has_changed():
continue
if self._should_delete_form(form):
self._deleted_form_indexes.append(i)
return [self.forms[i] for i in self._deleted_form_indexes]
@property
def ordered_forms(self):
"""
        Returns a list of forms in the order specified by the incoming data.
Raises an AttributeError if ordering is not allowed.
"""
if not self.is_valid() or not self.can_order:
raise AttributeError("'%s' object has no attribute 'ordered_forms'" % self.__class__.__name__)
# Construct _ordering, which is a list of (form_index, order_field_value)
# tuples. After constructing this list, we'll sort it by order_field_value
# so we have a way to get to the form indexes in the order specified
# by the form data.
if not hasattr(self, '_ordering'):
self._ordering = []
for i in range(0, self.total_form_count()):
form = self.forms[i]
# if this is an extra form and hasn't changed, don't consider it
if i >= self.initial_form_count() and not form.has_changed():
continue
# don't add data marked for deletion to self.ordered_data
if self.can_delete and self._should_delete_form(form):
continue
self._ordering.append((i, form.cleaned_data[ORDERING_FIELD_NAME]))
# After we're done populating self._ordering, sort it.
# A sort function to order things numerically ascending, but
# None should be sorted below anything else. Allowing None as
# a comparison value makes it so we can leave ordering fields
# blank.
def compare_ordering_key(k):
if k[1] is None:
return (1, 0) # +infinity, larger than any number
return (0, k[1])
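            # e.g. [(0, 2), (1, None), (2, 1)] sorts to
            # [(2, 1), (0, 2), (1, None)] -- blank ORDER fields go last.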
self._ordering.sort(key=compare_ordering_key)
# Return a list of form.cleaned_data dicts in the order specified by
# the form data.
return [self.forms[i[0]] for i in self._ordering]
@classmethod
def get_default_prefix(cls):
return 'form'
def non_form_errors(self):
"""
Returns an ErrorList of errors that aren't associated with a particular
form -- i.e., from formset.clean(). Returns an empty ErrorList if there
are none.
"""
if self._non_form_errors is None:
self.full_clean()
return self._non_form_errors
@property
def errors(self):
"""
Returns a list of form.errors for every form in self.forms.
"""
if self._errors is None:
self.full_clean()
return self._errors
def total_error_count(self):
"""
Returns the number of errors across all forms in the formset.
"""
return len(self.non_form_errors()) +\
sum(len(form_errors) for form_errors in self.errors)
def _should_delete_form(self, form):
"""
Returns whether or not the form was marked for deletion.
"""
return form.cleaned_data.get(DELETION_FIELD_NAME, False)
def is_valid(self):
"""
Returns True if every form in self.forms is valid.
"""
if not self.is_bound:
return False
# We loop over every form.errors here rather than short circuiting on the
# first failure to make sure validation gets triggered for every form.
forms_valid = True
# This triggers a full clean.
self.errors
for i in range(0, self.total_form_count()):
form = self.forms[i]
if self.can_delete:
if self._should_delete_form(form):
# This form is going to be deleted so any of its errors
# should not cause the entire formset to be invalid.
continue
forms_valid &= form.is_valid()
return forms_valid and not self.non_form_errors()
def full_clean(self):
"""
Cleans all of self.data and populates self._errors and
self._non_form_errors.
"""
self._errors = []
self._non_form_errors = self.error_class()
if not self.is_bound: # Stop further processing.
return
for i in range(0, self.total_form_count()):
form = self.forms[i]
self._errors.append(form.errors)
try:
if (self.validate_max and
self.total_form_count() - len(self.deleted_forms) > self.max_num) or \
self.management_form.cleaned_data[TOTAL_FORM_COUNT] > self.absolute_max:
raise ValidationError(ungettext(
"Please submit %d or fewer forms.",
"Please submit %d or fewer forms.", self.max_num) % self.max_num,
code='too_many_forms',
)
if (self.validate_min and
self.total_form_count() - len(self.deleted_forms) < self.min_num):
raise ValidationError(ungettext(
"Please submit %d or more forms.",
"Please submit %d or more forms.", self.min_num) % self.min_num,
code='too_few_forms')
# Give self.clean() a chance to do cross-form validation.
self.clean()
except ValidationError as e:
self._non_form_errors = self.error_class(e.error_list)
def clean(self):
"""
Hook for doing any extra formset-wide cleaning after Form.clean() has
been called on every form. Any ValidationError raised by this method
will not be associated with a particular form; it will be accessible
via formset.non_form_errors()
"""
pass
def has_changed(self):
"""
        Returns True if data in any form differs from initial.
"""
return any(form.has_changed() for form in self)
def add_fields(self, form, index):
"""A hook for adding extra fields on to each form instance."""
if self.can_order:
# Only pre-fill the ordering field for initial forms.
if index is not None and index < self.initial_form_count():
form.fields[ORDERING_FIELD_NAME] = IntegerField(label=_('Order'), initial=index + 1, required=False)
else:
form.fields[ORDERING_FIELD_NAME] = IntegerField(label=_('Order'), required=False)
if self.can_delete:
form.fields[DELETION_FIELD_NAME] = BooleanField(label=_('Delete'), required=False)
def add_prefix(self, index):
return '%s-%s' % (self.prefix, index)
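    # e.g. with the default prefix, add_prefix(0) returns 'form-0', so a
    # field named 'title' on the first form is rendered as 'form-0-title'.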
def is_multipart(self):
"""
Returns True if the formset needs to be multipart, i.e. it
has FileInput. Otherwise, False.
"""
if self.forms:
return self.forms[0].is_multipart()
else:
return self.empty_form.is_multipart()
@property
def media(self):
# All the forms on a FormSet are the same, so you only need to
# interrogate the first form for media.
if self.forms:
return self.forms[0].media
else:
return self.empty_form.media
def as_table(self):
"Returns this formset rendered as HTML <tr>s -- excluding the <table></table>."
# XXX: there is no semantic division between forms here, there
# probably should be. It might make sense to render each form as a
# table row with each field as a td.
forms = ' '.join(form.as_table() for form in self)
return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
def as_p(self):
"Returns this formset rendered as HTML <p>s."
forms = ' '.join(form.as_p() for form in self)
return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
def as_ul(self):
"Returns this formset rendered as HTML <li>s."
forms = ' '.join(form.as_ul() for form in self)
return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
def formset_factory(form, formset=BaseFormSet, extra=1, can_order=False,
can_delete=False, max_num=None, validate_max=False,
min_num=None, validate_min=False):
"""Return a FormSet for the given form class."""
if min_num is None:
min_num = DEFAULT_MIN_NUM
if max_num is None:
max_num = DEFAULT_MAX_NUM
# hard limit on forms instantiated, to prevent memory-exhaustion attacks
# limit is simply max_num + DEFAULT_MAX_NUM (which is 2*DEFAULT_MAX_NUM
# if max_num is None in the first place)
absolute_max = max_num + DEFAULT_MAX_NUM
attrs = {'form': form, 'extra': extra,
'can_order': can_order, 'can_delete': can_delete,
'min_num': min_num, 'max_num': max_num,
'absolute_max': absolute_max, 'validate_min': validate_min,
'validate_max': validate_max}
return type(form.__name__ + str('FormSet'), (formset,), attrs)
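# Usage sketch (illustrative, not part of this module):
#
#     from django import forms
#
#     class ArticleForm(forms.Form):
#         title = forms.CharField()
#
#     ArticleFormSet = formset_factory(ArticleForm, extra=2, max_num=10)
#     formset = ArticleFormSet(prefix='articles')  # unbound: 2 extra forms
#     print(formset.management_form)  # hidden TOTAL/INITIAL/MIN/MAX fields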
def all_valid(formsets):
"""Returns true if every formset in formsets is valid."""
valid = True
for formset in formsets:
if not formset.is_valid():
valid = False
return valid
| apache-2.0 |
olapaola/olapaola-android-scripting | python/gdata/src/gdata/tlslite/integration/ClientHelper.py | 285 | 7021 | """
A helper class for using TLS Lite with stdlib clients
(httplib, xmlrpclib, imaplib, poplib).
"""
from gdata.tlslite.Checker import Checker
class ClientHelper:
"""This is a helper class used to integrate TLS Lite with various
TLS clients (e.g. poplib, smtplib, httplib, etc.)"""
def __init__(self,
username=None, password=None, sharedKey=None,
certChain=None, privateKey=None,
cryptoID=None, protocol=None,
x509Fingerprint=None,
x509TrustList=None, x509CommonName=None,
settings = None):
"""
For client authentication, use one of these argument
combinations:
- username, password (SRP)
- username, sharedKey (shared-key)
- certChain, privateKey (certificate)
For server authentication, you can either rely on the
implicit mutual authentication performed by SRP or
shared-keys, or you can do certificate-based server
authentication with one of these argument combinations:
- cryptoID[, protocol] (requires cryptoIDlib)
- x509Fingerprint
- x509TrustList[, x509CommonName] (requires cryptlib_py)
Certificate-based server authentication is compatible with
SRP or certificate-based client authentication. It is
not compatible with shared-keys.
The constructor does not perform the TLS handshake itself, but
simply stores these arguments for later. The handshake is
performed only when this class needs to connect with the
server. Then you should be prepared to handle TLS-specific
exceptions. See the client handshake functions in
L{tlslite.TLSConnection.TLSConnection} for details on which
exceptions might be raised.
@type username: str
@param username: SRP or shared-key username. Requires the
'password' or 'sharedKey' argument.
@type password: str
@param password: SRP password for mutual authentication.
Requires the 'username' argument.
@type sharedKey: str
@param sharedKey: Shared key for mutual authentication.
Requires the 'username' argument.
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: Certificate chain for client authentication.
Requires the 'privateKey' argument. Excludes the SRP or
shared-key related arguments.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: Private key for client authentication.
Requires the 'certChain' argument. Excludes the SRP or
shared-key related arguments.
@type cryptoID: str
@param cryptoID: cryptoID for server authentication. Mutually
exclusive with the 'x509...' arguments.
@type protocol: str
@param protocol: cryptoID protocol URI for server
authentication. Requires the 'cryptoID' argument.
@type x509Fingerprint: str
@param x509Fingerprint: Hex-encoded X.509 fingerprint for
server authentication. Mutually exclusive with the 'cryptoID'
and 'x509TrustList' arguments.
@type x509TrustList: list of L{tlslite.X509.X509}
@param x509TrustList: A list of trusted root certificates. The
other party must present a certificate chain which extends to
one of these root certificates. The cryptlib_py module must be
installed to use this parameter. Mutually exclusive with the
'cryptoID' and 'x509Fingerprint' arguments.
@type x509CommonName: str
@param x509CommonName: The end-entity certificate's 'CN' field
must match this value. For a web server, this is typically a
server name such as 'www.amazon.com'. Mutually exclusive with
the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
'x509TrustList' argument.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
"""
self.username = None
self.password = None
self.sharedKey = None
self.certChain = None
self.privateKey = None
self.checker = None
#SRP Authentication
if username and password and not \
(sharedKey or certChain or privateKey):
self.username = username
self.password = password
#Shared Key Authentication
elif username and sharedKey and not \
(password or certChain or privateKey):
self.username = username
self.sharedKey = sharedKey
#Certificate Chain Authentication
elif certChain and privateKey and not \
(username or password or sharedKey):
self.certChain = certChain
self.privateKey = privateKey
#No Authentication
elif not password and not username and not \
sharedKey and not certChain and not privateKey:
pass
else:
raise ValueError("Bad parameters")
#Authenticate the server based on its cryptoID or fingerprint
if sharedKey and (cryptoID or protocol or x509Fingerprint):
raise ValueError("Can't use shared keys with other forms of"\
"authentication")
self.checker = Checker(cryptoID, protocol, x509Fingerprint,
x509TrustList, x509CommonName)
self.settings = settings
self.tlsSession = None
def _handshake(self, tlsConnection):
if self.username and self.password:
tlsConnection.handshakeClientSRP(username=self.username,
password=self.password,
checker=self.checker,
settings=self.settings,
session=self.tlsSession)
elif self.username and self.sharedKey:
tlsConnection.handshakeClientSharedKey(username=self.username,
sharedKey=self.sharedKey,
settings=self.settings)
else:
tlsConnection.handshakeClientCert(certChain=self.certChain,
privateKey=self.privateKey,
checker=self.checker,
settings=self.settings,
session=self.tlsSession)
self.tlsSession = tlsConnection.session
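# Usage sketch (illustrative): ClientHelper is not used directly; the
# tlslite integration classes mix it in. Assuming the gdata-bundled layout,
# an httplib-style connection with fingerprint-based server authentication
# would look roughly like:
#
#     from gdata.tlslite.integration.HTTPTLSConnection import \
#         HTTPTLSConnection
#     h = HTTPTLSConnection("www.example.com", 443,
#                           x509Fingerprint="0123...abcd")  # hex fingerprint
#     h.request("GET", "/")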
| apache-2.0 |
randynobx/ansible | test/units/plugins/connection/test_ssh.py | 20 | 29307 | # -*- coding: utf-8 -*-
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from io import StringIO
import pytest
from ansible import constants as C
from ansible.compat.selectors import SelectorKey, EVENT_READ
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock, PropertyMock
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ssh
class TestConnectionBaseClass(unittest.TestCase):
def test_plugins_connection_ssh_basic(self):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
# connect just returns self, so assert that
res = conn._connect()
self.assertEqual(conn, res)
ssh.SSHPASS_AVAILABLE = False
self.assertFalse(conn._sshpass_available())
ssh.SSHPASS_AVAILABLE = True
self.assertTrue(conn._sshpass_available())
with patch('subprocess.Popen') as p:
ssh.SSHPASS_AVAILABLE = None
p.return_value = MagicMock()
self.assertTrue(conn._sshpass_available())
ssh.SSHPASS_AVAILABLE = None
p.return_value = None
p.side_effect = OSError()
self.assertFalse(conn._sshpass_available())
conn.close()
self.assertFalse(conn._connected)
def test_plugins_connection_ssh__build_command(self):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn._build_command('ssh')
def test_plugins_connection_ssh_exec_command(self):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn._build_command = MagicMock()
conn._build_command.return_value = 'ssh something something'
conn._run = MagicMock()
conn._run.return_value = (0, 'stdout', 'stderr')
res, stdout, stderr = conn.exec_command('ssh')
res, stdout, stderr = conn.exec_command('ssh', 'this is some data')
def test_plugins_connection_ssh__examine_output(self):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn.check_password_prompt = MagicMock()
conn.check_become_success = MagicMock()
conn.check_incorrect_password = MagicMock()
conn.check_missing_password = MagicMock()
def _check_password_prompt(line):
if b'foo' in line:
return True
return False
def _check_become_success(line):
if b'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz' in line:
return True
return False
def _check_incorrect_password(line):
if b'incorrect password' in line:
return True
return False
def _check_missing_password(line):
if b'bad password' in line:
return True
return False
conn.check_password_prompt.side_effect = _check_password_prompt
conn.check_become_success.side_effect = _check_become_success
conn.check_incorrect_password.side_effect = _check_incorrect_password
conn.check_missing_password.side_effect = _check_missing_password
# test examining output for prompt
conn._flags = dict(
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)
pc.prompt = True
output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nfoo\nline 3\nthis should be the remainder', False)
self.assertEqual(output, b'line 1\nline 2\nline 3\n')
self.assertEqual(unprocessed, b'this should be the remainder')
self.assertTrue(conn._flags['become_prompt'])
self.assertFalse(conn._flags['become_success'])
self.assertFalse(conn._flags['become_error'])
self.assertFalse(conn._flags['become_nopasswd_error'])
# test examining output for become prompt
conn._flags = dict(
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)
pc.prompt = False
pc.success_key = u'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz'
output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nBECOME-SUCCESS-abcdefghijklmnopqrstuvxyz\nline 3\n', False)
self.assertEqual(output, b'line 1\nline 2\nline 3\n')
self.assertEqual(unprocessed, b'')
self.assertFalse(conn._flags['become_prompt'])
self.assertTrue(conn._flags['become_success'])
self.assertFalse(conn._flags['become_error'])
self.assertFalse(conn._flags['become_nopasswd_error'])
# test examining output for become failure
conn._flags = dict(
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)
pc.prompt = False
pc.success_key = None
output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nincorrect password\n', True)
self.assertEqual(output, b'line 1\nline 2\nincorrect password\n')
self.assertEqual(unprocessed, b'')
self.assertFalse(conn._flags['become_prompt'])
self.assertFalse(conn._flags['become_success'])
self.assertTrue(conn._flags['become_error'])
self.assertFalse(conn._flags['become_nopasswd_error'])
# test examining output for missing password
conn._flags = dict(
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)
pc.prompt = False
pc.success_key = None
output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nbad password\n', True)
self.assertEqual(output, b'line 1\nbad password\n')
self.assertEqual(unprocessed, b'')
self.assertFalse(conn._flags['become_prompt'])
self.assertFalse(conn._flags['become_success'])
self.assertFalse(conn._flags['become_error'])
self.assertTrue(conn._flags['become_nopasswd_error'])
@patch('time.sleep')
@patch('os.path.exists')
def test_plugins_connection_ssh_put_file(self, mock_ospe, mock_sleep):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn._build_command = MagicMock()
conn._run = MagicMock()
mock_ospe.return_value = True
conn._build_command.return_value = 'some command to run'
conn._run.return_value = (0, '', '')
conn.host = "some_host"
C.ANSIBLE_SSH_RETRIES = 9
# Test with C.DEFAULT_SCP_IF_SSH set to smart
# Test when SFTP works
C.DEFAULT_SCP_IF_SSH = 'smart'
expected_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
conn.put_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False)
# Test when SFTP doesn't work but SCP does
conn._run.side_effect = [(1, 'stdout', 'some errors'), (0, '', '')]
conn.put_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', None, checkrc=False)
conn._run.side_effect = None
# test with C.DEFAULT_SCP_IF_SSH enabled
C.DEFAULT_SCP_IF_SSH = True
conn.put_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', None, checkrc=False)
conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', None, checkrc=False)
# test with C.DEFAULT_SCP_IF_SSH disabled
C.DEFAULT_SCP_IF_SSH = False
expected_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
conn.put_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False)
expected_in_data = b' '.join((b'put',
to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')),
to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False)
# test that a non-zero rc raises an error
conn._run.return_value = (1, 'stdout', 'some errors')
self.assertRaises(AnsibleError, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
# test that a not-found path raises an error
mock_ospe.return_value = False
conn._run.return_value = (0, 'stdout', '')
self.assertRaises(AnsibleFileNotFound, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
@patch('time.sleep')
def test_plugins_connection_ssh_fetch_file(self, mock_sleep):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn._build_command = MagicMock()
conn._run = MagicMock()
conn._build_command.return_value = 'some command to run'
conn._run.return_value = (0, '', '')
conn.host = "some_host"
C.ANSIBLE_SSH_RETRIES = 9
# Test with C.DEFAULT_SCP_IF_SSH set to smart
# Test when SFTP works
C.DEFAULT_SCP_IF_SSH = 'smart'
expected_in_data = b' '.join((b'get', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False)
# Test when SFTP doesn't work but SCP does
conn._run.side_effect = [(1, 'stdout', 'some errors'), (0, '', '')]
conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', None, checkrc=False)
conn._run.side_effect = None
# test with C.DEFAULT_SCP_IF_SSH enabled
C.DEFAULT_SCP_IF_SSH = True
conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', None, checkrc=False)
conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', None, checkrc=False)
# test with C.DEFAULT_SCP_IF_SSH disabled
C.DEFAULT_SCP_IF_SSH = False
expected_in_data = b' '.join((b'get', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False)
expected_in_data = b' '.join((b'get',
to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')),
to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False)
# test that a non-zero rc raises an error
conn._run.return_value = (1, 'stdout', 'some errors')
self.assertRaises(AnsibleError, conn.fetch_file, '/path/to/bad/file', '/remote/path/to/file')
class MockSelector(object):
def __init__(self):
self.files_watched = 0
self.register = MagicMock(side_effect=self._register)
self.unregister = MagicMock(side_effect=self._unregister)
self.close = MagicMock()
self.get_map = MagicMock(side_effect=self._get_map)
self.select = MagicMock()
def _register(self, *args, **kwargs):
self.files_watched += 1
def _unregister(self, *args, **kwargs):
self.files_watched -= 1
def _get_map(self, *args, **kwargs):
return self.files_watched
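    # Illustrative note: the files_watched counter mirrors how the real
    # selector behaves -- once every pipe has been unregistered, get_map()
    # becomes falsey and the read loop can terminate. Individual tests
    # override get_map/select with explicit side effects as needed.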
@pytest.fixture
def mock_run_env(request, mocker):
pc = PlayContext()
new_stdin = StringIO()
conn = ssh.Connection(pc, new_stdin)
conn._send_initial_data = MagicMock()
conn._examine_output = MagicMock()
conn._terminate_process = MagicMock()
conn.sshpass_pipe = [MagicMock(), MagicMock()]
request.cls.pc = pc
request.cls.conn = conn
mock_popen_res = MagicMock()
mock_popen_res.poll = MagicMock()
mock_popen_res.wait = MagicMock()
mock_popen_res.stdin = MagicMock()
mock_popen_res.stdin.fileno.return_value = 1000
mock_popen_res.stdout = MagicMock()
mock_popen_res.stdout.fileno.return_value = 1001
mock_popen_res.stderr = MagicMock()
mock_popen_res.stderr.fileno.return_value = 1002
mock_popen_res.returncode = 0
request.cls.mock_popen_res = mock_popen_res
mock_popen = mocker.patch('subprocess.Popen', return_value=mock_popen_res)
request.cls.mock_popen = mock_popen
request.cls.mock_selector = MockSelector()
mocker.patch('ansible.compat.selectors.DefaultSelector', lambda: request.cls.mock_selector)
request.cls.mock_openpty = mocker.patch('pty.openpty')
mocker.patch('fcntl.fcntl')
mocker.patch('os.write')
mocker.patch('os.close')
@pytest.mark.usefixtures('mock_run_env')
class TestSSHConnectionRun(object):
# FIXME:
# These tests are little more than a smoketest. Need to enhance them
# a bit to check that they're calling the relevant functions and making
# complete coverage of the code paths
def test_no_escalation(self):
self.mock_popen_res.stdout.read.side_effect = [b"my_stdout\n", b"second_line"]
self.mock_popen_res.stderr.read.side_effect = [b"my_stderr"]
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[]]
self.mock_selector.get_map.side_effect = lambda: True
return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
assert return_code == 0
assert b_stdout == b'my_stdout\nsecond_line'
assert b_stderr == b'my_stderr'
assert self.mock_selector.register.called is True
assert self.mock_selector.register.call_count == 2
assert self.conn._send_initial_data.called is True
assert self.conn._send_initial_data.call_count == 1
assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'
def test_with_password(self):
# test with a password set to trigger the sshpass write
self.pc.password = '12345'
self.mock_popen_res.stdout.read.side_effect = [b"some data", b"", b""]
self.mock_popen_res.stderr.read.side_effect = [b""]
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[]]
self.mock_selector.get_map.side_effect = lambda: True
return_code, b_stdout, b_stderr = self.conn._run(["ssh", "is", "a", "cmd"], "this is more data")
assert return_code == 0
assert b_stdout == b'some data'
assert b_stderr == b''
assert self.mock_selector.register.called is True
assert self.mock_selector.register.call_count == 2
assert self.conn._send_initial_data.called is True
assert self.conn._send_initial_data.call_count == 1
assert self.conn._send_initial_data.call_args[0][1] == 'this is more data'
    def _password_with_prompt_examine_output(self, source, state, b_chunk, sudoable):
if state == 'awaiting_prompt':
self.conn._flags['become_prompt'] = True
elif state == 'awaiting_escalation':
self.conn._flags['become_success'] = True
return (b'', b'')
    def test_password_with_prompt(self):
# test with password prompting enabled
self.pc.password = None
self.pc.prompt = b'Password:'
self.conn._examine_output.side_effect = self._password_with_prompt_examine_output
self.mock_popen_res.stdout.read.side_effect = [b"Password:", b"Success", b""]
self.mock_popen_res.stderr.read.side_effect = [b""]
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ),
(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[]]
self.mock_selector.get_map.side_effect = lambda: True
return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
assert return_code == 0
assert b_stdout == b''
assert b_stderr == b''
assert self.mock_selector.register.called is True
assert self.mock_selector.register.call_count == 2
assert self.conn._send_initial_data.called is True
assert self.conn._send_initial_data.call_count == 1
assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'
    def test_password_with_become(self):
# test with some become settings
self.pc.prompt = b'Password:'
self.pc.become = True
self.pc.success_key = 'BECOME-SUCCESS-abcdefg'
self.conn._examine_output.side_effect = self._password_with_prompt_examine_output
self.mock_popen_res.stdout.read.side_effect = [b"Password:", b"BECOME-SUCCESS-abcdefg", b"abc"]
self.mock_popen_res.stderr.read.side_effect = [b"123"]
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[]]
self.mock_selector.get_map.side_effect = lambda: True
return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
assert return_code == 0
assert b_stdout == b'abc'
assert b_stderr == b'123'
assert self.mock_selector.register.called is True
assert self.mock_selector.register.call_count == 2
assert self.conn._send_initial_data.called is True
assert self.conn._send_initial_data.call_count == 1
assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'
    def test_password_without_data(self):
# simulate no data input
self.mock_openpty.return_value = (98, 99)
self.mock_popen_res.stdout.read.side_effect = [b"some data", b"", b""]
self.mock_popen_res.stderr.read.side_effect = [b""]
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[]]
self.mock_selector.get_map.side_effect = lambda: True
return_code, b_stdout, b_stderr = self.conn._run("ssh", "")
assert return_code == 0
assert b_stdout == b'some data'
assert b_stderr == b''
assert self.mock_selector.register.called is True
assert self.mock_selector.register.call_count == 2
assert self.conn._send_initial_data.called is False
    def test_password_without_data_and_popen_failure(self):
# simulate no data input but Popen using new pty's fails
self.mock_popen.return_value = None
self.mock_popen.side_effect = [OSError(), self.mock_popen_res]
# simulate no data input
self.mock_openpty.return_value = (98, 99)
self.mock_popen_res.stdout.read.side_effect = [b"some data", b"", b""]
self.mock_popen_res.stderr.read.side_effect = [b""]
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[]]
self.mock_selector.get_map.side_effect = lambda: True
return_code, b_stdout, b_stderr = self.conn._run("ssh", "")
assert return_code == 0
assert b_stdout == b'some data'
assert b_stderr == b''
assert self.mock_selector.register.called is True
assert self.mock_selector.register.call_count == 2
assert self.conn._send_initial_data.called is False
@pytest.mark.usefixtures('mock_run_env')
class TestSSHConnectionRetries(object):
def test_retry_then_success(self, monkeypatch):
monkeypatch.setattr(C, 'HOST_KEY_CHECKING', False)
monkeypatch.setattr(C, 'ANSIBLE_SSH_RETRIES', 3)
monkeypatch.setattr('time.sleep', lambda x: None)
self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 3 + [0] * 4)
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[]
]
self.mock_selector.get_map.side_effect = lambda: True
self.conn._build_command = MagicMock()
self.conn._build_command.return_value = 'ssh'
return_code, b_stdout, b_stderr = self.conn.exec_command('ssh', 'some data')
assert return_code == 0
assert b_stdout == b'my_stdout\nsecond_line'
assert b_stderr == b'my_stderr'
def test_multiple_failures(self, monkeypatch):
monkeypatch.setattr(C, 'HOST_KEY_CHECKING', False)
monkeypatch.setattr(C, 'ANSIBLE_SSH_RETRIES', 9)
monkeypatch.setattr('time.sleep', lambda x: None)
self.mock_popen_res.stdout.read.side_effect = [b""] * 11
self.mock_popen_res.stderr.read.side_effect = [b""] * 11
type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 30)
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[],
] * 10
self.mock_selector.get_map.side_effect = lambda: True
self.conn._build_command = MagicMock()
self.conn._build_command.return_value = 'ssh'
pytest.raises(AnsibleConnectionFailure, self.conn.exec_command, 'ssh', 'some data')
assert self.mock_popen.call_count == 10
    def test_arbitrary_exceptions(self, monkeypatch):
monkeypatch.setattr(C, 'HOST_KEY_CHECKING', False)
monkeypatch.setattr(C, 'ANSIBLE_SSH_RETRIES', 9)
monkeypatch.setattr('time.sleep', lambda x: None)
self.conn._build_command = MagicMock()
self.conn._build_command.return_value = 'ssh'
self.mock_popen.side_effect = [Exception('bad')] * 10
pytest.raises(Exception, self.conn.exec_command, 'ssh', 'some data')
assert self.mock_popen.call_count == 10
def test_put_file_retries(self, monkeypatch):
monkeypatch.setattr(C, 'HOST_KEY_CHECKING', False)
monkeypatch.setattr(C, 'ANSIBLE_SSH_RETRIES', 3)
monkeypatch.setattr('time.sleep', lambda x: None)
monkeypatch.setattr('ansible.plugins.connection.ssh.os.path.exists', lambda x: True)
self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 4 + [0] * 4)
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[]
]
self.mock_selector.get_map.side_effect = lambda: True
self.conn._build_command = MagicMock()
self.conn._build_command.return_value = 'sftp'
return_code, b_stdout, b_stderr = self.conn.put_file('/path/to/in/file', '/path/to/dest/file')
assert return_code == 0
assert b_stdout == b"my_stdout\nsecond_line"
assert b_stderr == b"my_stderr"
assert self.mock_popen.call_count == 2
def test_fetch_file_retries(self, monkeypatch):
monkeypatch.setattr(C, 'HOST_KEY_CHECKING', False)
monkeypatch.setattr(C, 'ANSIBLE_SSH_RETRIES', 3)
monkeypatch.setattr('time.sleep', lambda x: None)
monkeypatch.setattr('ansible.plugins.connection.ssh.os.path.exists', lambda x: True)
self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 4 + [0] * 4)
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[]
]
self.mock_selector.get_map.side_effect = lambda: True
self.conn._build_command = MagicMock()
self.conn._build_command.return_value = 'sftp'
return_code, b_stdout, b_stderr = self.conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
assert return_code == 0
assert b_stdout == b"my_stdout\nsecond_line"
assert b_stderr == b"my_stderr"
assert self.mock_popen.call_count == 2
| gpl-3.0 |
aperigault/ansible | lib/ansible/modules/cloud/openstack/os_server.py | 25 | 25051 | #!/usr/bin/python
# coding: utf-8 -*-
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013, Benno Joy <benno@ansible.com>
# Copyright (c) 2013, John Dewey <john@dewey.ws>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_server
short_description: Create/Delete Compute Instances from OpenStack
extends_documentation_fragment: openstack
version_added: "2.0"
author: "Monty Taylor (@emonty)"
description:
- Create or Remove compute instances from OpenStack.
options:
name:
description:
- Name that has to be given to the instance. It is also possible to
specify the ID of the instance instead of its name if I(state) is I(absent).
required: true
image:
description:
- The name or id of the base image to boot.
required: true
image_exclude:
description:
- Text to use to filter image names, for the case, such as HP, where
there are multiple image names matching the common identifying
portions. image_exclude is a negative match filter - it is text that
may not exist in the image name. Defaults to "(deprecated)"
flavor:
description:
- The name or id of the flavor in which the new instance has to be
created. Mutually exclusive with flavor_ram
default: 1
flavor_ram:
description:
- The minimum amount of ram in MB that the flavor in which the new
instance has to be created must have. Mutually exclusive with flavor.
default: 1
flavor_include:
description:
- Text to use to filter flavor names, for the case, such as Rackspace,
where there are multiple flavors that have the same ram count.
flavor_include is a positive match filter - it must exist in the
flavor name.
key_name:
description:
      - The key pair name to be used when creating an instance
security_groups:
description:
- Names of the security groups to which the instance should be
added. This may be a YAML list or a comma separated string.
network:
description:
- Name or ID of a network to attach this instance to. A simpler
version of the nics parameter, only one of network or nics should
be supplied.
nics:
description:
- A list of networks to which the instance's interface should
be attached. Networks may be referenced by net-id/net-name/port-id
or port-name.
- 'Also this accepts a string containing a list of (net/port)-(id/name)
Eg: nics: "net-id=uuid-1,port-name=myport"
Only one of network or nics should be supplied.'
auto_ip:
description:
- Ensure instance has public ip however the cloud wants to do that
type: bool
default: 'yes'
aliases: ['auto_floating_ip', 'public_ip']
floating_ips:
description:
- list of valid floating IPs that pre-exist to assign to this node
floating_ip_pools:
description:
- Name of floating IP pool from which to choose a floating IP
meta:
description:
- 'A list of key value pairs that should be provided as a metadata to
the new instance or a string containing a list of key-value pairs.
Eg: meta: "key1=value1,key2=value2"'
wait:
description:
- If the module should wait for the instance to be created.
type: bool
default: 'yes'
timeout:
description:
- The amount of time the module should wait for the instance to get
into active state.
default: 180
config_drive:
description:
- Whether to boot the server with config drive enabled
type: bool
default: 'no'
userdata:
description:
- Opaque blob of data which is made available to the instance
boot_from_volume:
description:
- Should the instance boot from a persistent volume created based on
        the image given. Mutually exclusive with boot_volume.
type: bool
default: 'no'
volume_size:
description:
- The size of the volume to create in GB if booting from volume based
on an image.
boot_volume:
description:
- Volume name or id to use as the volume to boot from. Implies
boot_from_volume. Mutually exclusive with image and boot_from_volume.
aliases: ['root_volume']
terminate_volume:
description:
- If C(yes), delete volume when deleting instance (if booted from volume)
type: bool
default: 'no'
volumes:
description:
- A list of preexisting volumes names or ids to attach to the instance
default: []
scheduler_hints:
description:
- Arbitrary key/value pairs to the scheduler for custom use
version_added: "2.1"
state:
description:
- Should the resource be present or absent.
choices: [present, absent]
default: present
delete_fip:
description:
- When I(state) is absent and this option is true, any floating IP
associated with the instance will be deleted along with the instance.
type: bool
default: 'no'
version_added: "2.2"
reuse_ips:
description:
- When I(auto_ip) is true and this option is true, the I(auto_ip) code
will attempt to re-use unassigned floating ips in the project before
creating a new one. It is important to note that it is impossible
to safely do this concurrently, so if your use case involves
concurrent server creation, it is highly recommended to set this to
false and to delete the floating ip associated with a server when
the server is deleted using I(delete_fip).
type: bool
default: 'yes'
version_added: "2.2"
availability_zone:
description:
- Availability zone in which to create the server.
requirements:
- "python >= 2.7"
- "openstacksdk"
'''
EXAMPLES = '''
- name: Create a new instance and attaches to a network and passes metadata to the instance
os_server:
state: present
auth:
auth_url: https://identity.example.com
username: admin
password: admin
project_name: admin
name: vm1
image: 4f905f38-e52a-43d2-b6ec-754a13ffb529
key_name: ansible_key
timeout: 200
flavor: 4
nics:
- net-id: 34605f38-e52a-25d2-b6ec-754a13ffb723
- net-name: another_network
meta:
hostname: test1
group: uge_master
# Create a new instance in HP Cloud AE1 region availability zone az2 and
# automatically assigns a floating IP
- name: launch a compute instance
hosts: localhost
tasks:
- name: launch an instance
os_server:
state: present
auth:
auth_url: https://identity.example.com
username: username
password: Equality7-2521
project_name: username-project1
name: vm1
region_name: region-b.geo-1
availability_zone: az2
image: 9302692b-b787-4b52-a3a6-daebb79cb498
key_name: test
timeout: 200
flavor: 101
security_groups: default
auto_ip: yes
# Create a new instance in named cloud mordred availability zone az2
# and assigns a pre-known floating IP
- name: launch a compute instance
hosts: localhost
tasks:
- name: launch an instance
os_server:
state: present
cloud: mordred
name: vm1
availability_zone: az2
image: 9302692b-b787-4b52-a3a6-daebb79cb498
key_name: test
timeout: 200
flavor: 101
floating_ips:
- 12.34.56.79
# Create a new instance with 4G of RAM on Ubuntu Trusty, ignoring
# deprecated images
- name: launch a compute instance
hosts: localhost
tasks:
- name: launch an instance
os_server:
name: vm1
state: present
cloud: mordred
region_name: region-b.geo-1
image: Ubuntu Server 14.04
image_exclude: deprecated
flavor_ram: 4096
# Create a new instance with 4G of RAM on Ubuntu Trusty on a Performance node
- name: launch a compute instance
hosts: localhost
tasks:
- name: launch an instance
os_server:
name: vm1
cloud: rax-dfw
state: present
image: Ubuntu 14.04 LTS (Trusty Tahr) (PVHVM)
flavor_ram: 4096
flavor_include: Performance
# Creates a new instance and attaches to multiple networks
- name: launch a compute instance
hosts: localhost
tasks:
- name: launch an instance with a string
os_server:
auth:
auth_url: https://identity.example.com
username: admin
password: admin
project_name: admin
name: vm1
image: 4f905f38-e52a-43d2-b6ec-754a13ffb529
key_name: ansible_key
timeout: 200
flavor: 4
nics: "net-id=4cb08b20-62fe-11e5-9d70-feff819cdc9f,net-id=542f0430-62fe-11e5-9d70-feff819cdc9f..."
- name: Creates a new instance and attaches to a network and passes metadata to the instance
os_server:
state: present
auth:
auth_url: https://identity.example.com
username: admin
password: admin
project_name: admin
name: vm1
image: 4f905f38-e52a-43d2-b6ec-754a13ffb529
key_name: ansible_key
timeout: 200
flavor: 4
nics:
- net-id: 34605f38-e52a-25d2-b6ec-754a13ffb723
- net-name: another_network
meta: "hostname=test1,group=uge_master"
- name: Creates a new instance and attaches to a specific network
os_server:
state: present
auth:
auth_url: https://identity.example.com
username: admin
password: admin
project_name: admin
name: vm1
image: 4f905f38-e52a-43d2-b6ec-754a13ffb529
key_name: ansible_key
timeout: 200
flavor: 4
network: another_network
# Create a new instance with 4G of RAM on a 75G Ubuntu Trusty volume
- name: launch a compute instance
hosts: localhost
tasks:
- name: launch an instance
os_server:
name: vm1
state: present
cloud: mordred
region_name: ams01
image: Ubuntu Server 14.04
flavor_ram: 4096
boot_from_volume: True
volume_size: 75
# Creates a new instance with 2 volumes attached
- name: launch a compute instance
hosts: localhost
tasks:
- name: launch an instance
os_server:
name: vm1
state: present
cloud: mordred
region_name: ams01
image: Ubuntu Server 14.04
flavor_ram: 4096
volumes:
- photos
- music
# Creates a new instance with provisioning userdata using Cloud-Init
- name: launch a compute instance
hosts: localhost
tasks:
- name: launch an instance
os_server:
name: vm1
state: present
image: "Ubuntu Server 14.04"
flavor: "P-1"
network: "Production"
userdata: |
#cloud-config
chpasswd:
list: |
ubuntu:{{ default_password }}
expire: False
packages:
- ansible
package_upgrade: true
# Creates a new instance with provisioning userdata using Bash Scripts
- name: launch a compute instance
hosts: localhost
tasks:
- name: launch an instance
os_server:
name: vm1
state: present
image: "Ubuntu Server 14.04"
flavor: "P-1"
network: "Production"
userdata: |
{%- raw -%}#!/bin/bash
echo " up ip route add 10.0.0.0/8 via {% endraw -%}{{ intra_router }}{%- raw -%}" >> /etc/network/interfaces.d/eth0.conf
echo " down ip route del 10.0.0.0/8" >> /etc/network/interfaces.d/eth0.conf
ifdown eth0 && ifup eth0
{% endraw %}
# Create a new instance with server group for (anti-)affinity
# server group ID is returned from os_server_group module.
- name: launch a compute instance
hosts: localhost
tasks:
- name: launch an instance
os_server:
state: present
name: vm1
image: 4f905f38-e52a-43d2-b6ec-754a13ffb529
flavor: 4
scheduler_hints:
group: f5c8c61a-9230-400a-8ed2-3b023c190a7f
# Deletes an instance via its ID
- name: remove an instance
hosts: localhost
tasks:
- name: remove an instance
os_server:
name: abcdef01-2345-6789-0abc-def0123456789
state: absent
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.openstack import (
openstack_find_nova_addresses, openstack_cloud_from_module,
openstack_full_argument_spec, openstack_module_kwargs)
def _exit_hostvars(module, cloud, server, changed=True):
hostvars = cloud.get_openstack_vars(server)
module.exit_json(
changed=changed, server=server, id=server.id, openstack=hostvars)
def _parse_nics(nics):
for net in nics:
if isinstance(net, str):
for nic in net.split(','):
yield dict((nic.split('='),))
else:
yield net
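# Illustrative example: both string and dict entries normalise to dicts --
# list(_parse_nics(["net-id=uuid-1,port-name=myport", {"net-name": "x"}]))
# yields [{'net-id': 'uuid-1'}, {'port-name': 'myport'}, {'net-name': 'x'}].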
def _network_args(module, cloud):
args = []
nics = module.params['nics']
if not isinstance(nics, list):
module.fail_json(msg='The \'nics\' parameter must be a list.')
for net in _parse_nics(nics):
if not isinstance(net, dict):
module.fail_json(
msg='Each entry in the \'nics\' parameter must be a dict.')
if net.get('net-id'):
args.append(net)
elif net.get('net-name'):
by_name = cloud.get_network(net['net-name'])
if not by_name:
module.fail_json(
msg='Could not find network by net-name: %s' %
net['net-name'])
args.append({'net-id': by_name['id']})
elif net.get('port-id'):
args.append(net)
elif net.get('port-name'):
by_name = cloud.get_port(net['port-name'])
if not by_name:
module.fail_json(
msg='Could not find port by port-name: %s' %
net['port-name'])
args.append({'port-id': by_name['id']})
return args
def _parse_meta(meta):
if isinstance(meta, str):
metas = {}
for kv_str in meta.split(","):
k, v = kv_str.split("=")
metas[k] = v
return metas
if not meta:
return {}
return meta
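# Illustrative example:
#   _parse_meta("key1=value1,key2=value2") == {'key1': 'value1',
#                                              'key2': 'value2'}
# A falsey value returns {} and an existing dict passes through unchanged.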
def _delete_server(module, cloud):
try:
cloud.delete_server(
module.params['name'], wait=module.params['wait'],
timeout=module.params['timeout'],
delete_ips=module.params['delete_fip'])
except Exception as e:
module.fail_json(msg="Error in deleting vm: %s" % e.message)
module.exit_json(changed=True, result='deleted')
def _create_server(module, cloud):
flavor = module.params['flavor']
flavor_ram = module.params['flavor_ram']
flavor_include = module.params['flavor_include']
image_id = None
if not module.params['boot_volume']:
image_id = cloud.get_image_id(
module.params['image'], module.params['image_exclude'])
if not image_id:
module.fail_json(msg="Could not find image %s" %
module.params['image'])
if flavor:
flavor_dict = cloud.get_flavor(flavor)
if not flavor_dict:
module.fail_json(msg="Could not find flavor %s" % flavor)
else:
flavor_dict = cloud.get_flavor_by_ram(flavor_ram, flavor_include)
if not flavor_dict:
module.fail_json(msg="Could not find any matching flavor")
nics = _network_args(module, cloud)
module.params['meta'] = _parse_meta(module.params['meta'])
bootkwargs = dict(
name=module.params['name'],
image=image_id,
flavor=flavor_dict['id'],
nics=nics,
meta=module.params['meta'],
security_groups=module.params['security_groups'],
userdata=module.params['userdata'],
config_drive=module.params['config_drive'],
)
for optional_param in (
'key_name', 'availability_zone', 'network',
'scheduler_hints', 'volume_size', 'volumes'):
if module.params[optional_param]:
bootkwargs[optional_param] = module.params[optional_param]
server = cloud.create_server(
ip_pool=module.params['floating_ip_pools'],
ips=module.params['floating_ips'],
auto_ip=module.params['auto_ip'],
boot_volume=module.params['boot_volume'],
boot_from_volume=module.params['boot_from_volume'],
terminate_volume=module.params['terminate_volume'],
reuse_ips=module.params['reuse_ips'],
wait=module.params['wait'], timeout=module.params['timeout'],
**bootkwargs
)
_exit_hostvars(module, cloud, server)
def _update_server(module, cloud, server):
changed = False
module.params['meta'] = _parse_meta(module.params['meta'])
# cloud.set_server_metadata only updates the key=value pairs, it doesn't
# touch existing ones
update_meta = {}
for (k, v) in module.params['meta'].items():
if k not in server.metadata or server.metadata[k] != v:
update_meta[k] = v
if update_meta:
cloud.set_server_metadata(server, update_meta)
changed = True
# Refresh server vars
server = cloud.get_server(module.params['name'])
return (changed, server)
def _detach_ip_list(cloud, server, extra_ips):
for ip in extra_ips:
ip_id = cloud.get_floating_ip(
id=None, filters={'floating_ip_address': ip})
cloud.detach_ip_from_server(
server_id=server.id, floating_ip_id=ip_id)
def _check_ips(module, cloud, server):
changed = False
auto_ip = module.params['auto_ip']
floating_ips = module.params['floating_ips']
floating_ip_pools = module.params['floating_ip_pools']
if floating_ip_pools or floating_ips:
ips = openstack_find_nova_addresses(server.addresses, 'floating')
if not ips:
# If we're configured to have a floating but we don't have one,
# let's add one
server = cloud.add_ips_to_server(
server,
auto_ip=auto_ip,
ips=floating_ips,
ip_pool=floating_ip_pools,
wait=module.params['wait'],
timeout=module.params['timeout'],
)
changed = True
elif floating_ips:
# we were configured to have specific ips, let's make sure we have
# those
missing_ips = []
for ip in floating_ips:
if ip not in ips:
missing_ips.append(ip)
if missing_ips:
server = cloud.add_ip_list(server, missing_ips,
wait=module.params['wait'],
timeout=module.params['timeout'])
changed = True
extra_ips = []
for ip in ips:
if ip not in floating_ips:
extra_ips.append(ip)
if extra_ips:
_detach_ip_list(cloud, server, extra_ips)
changed = True
elif auto_ip:
if server['interface_ip']:
changed = False
else:
# We're configured for auto_ip but we're not showing an
# interface_ip. Maybe someone deleted an IP out from under us.
server = cloud.add_ips_to_server(
server,
auto_ip=auto_ip,
ips=floating_ips,
ip_pool=floating_ip_pools,
wait=module.params['wait'],
timeout=module.params['timeout'],
)
changed = True
return (changed, server)
def _check_security_groups(module, cloud, server):
changed = False
# server security groups were added to shade in 1.19. Until then this
# module simply ignored trying to update security groups and only set them
# on newly created hosts.
if not (hasattr(cloud, 'add_server_security_groups') and
hasattr(cloud, 'remove_server_security_groups')):
return changed, server
module_security_groups = set(module.params['security_groups'])
server_security_groups = set(sg['name'] for sg in server.security_groups)
add_sgs = module_security_groups - server_security_groups
remove_sgs = server_security_groups - module_security_groups
if add_sgs:
cloud.add_server_security_groups(server, list(add_sgs))
changed = True
if remove_sgs:
cloud.remove_server_security_groups(server, list(remove_sgs))
changed = True
return (changed, server)
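# Illustration of the reconciliation in _check_security_groups above
# (hypothetical group names): with security_groups=['web', 'ssh'] requested
# and the server currently in ['default', 'ssh'], add_sgs is {'web'} and
# remove_sgs is {'default'}, so the server ends up carrying exactly the
# requested groups.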
def _get_server_state(module, cloud):
state = module.params['state']
server = cloud.get_server(module.params['name'])
if server and state == 'present':
if server.status not in ('ACTIVE', 'SHUTOFF', 'PAUSED', 'SUSPENDED'):
module.fail_json(
msg="The instance is available but not Active state: " + server.status)
(ip_changed, server) = _check_ips(module, cloud, server)
(sg_changed, server) = _check_security_groups(module, cloud, server)
(server_changed, server) = _update_server(module, cloud, server)
_exit_hostvars(module, cloud, server,
ip_changed or sg_changed or server_changed)
if server and state == 'absent':
return True
if state == 'absent':
module.exit_json(changed=False, result="not present")
return True
def main():
argument_spec = openstack_full_argument_spec(
name=dict(required=True),
image=dict(default=None),
image_exclude=dict(default='(deprecated)'),
flavor=dict(default=None),
flavor_ram=dict(default=None, type='int'),
flavor_include=dict(default=None),
key_name=dict(default=None),
security_groups=dict(default=['default'], type='list'),
network=dict(default=None),
nics=dict(default=[], type='list'),
meta=dict(default=None, type='raw'),
userdata=dict(default=None, aliases=['user_data']),
config_drive=dict(default=False, type='bool'),
auto_ip=dict(default=True, type='bool', aliases=['auto_floating_ip', 'public_ip']),
floating_ips=dict(default=None, type='list'),
floating_ip_pools=dict(default=None, type='list'),
volume_size=dict(default=False, type='int'),
boot_from_volume=dict(default=False, type='bool'),
boot_volume=dict(default=None, aliases=['root_volume']),
terminate_volume=dict(default=False, type='bool'),
volumes=dict(default=[], type='list'),
scheduler_hints=dict(default=None, type='dict'),
state=dict(default='present', choices=['absent', 'present']),
delete_fip=dict(default=False, type='bool'),
reuse_ips=dict(default=True, type='bool'),
)
module_kwargs = openstack_module_kwargs(
mutually_exclusive=[
['auto_ip', 'floating_ips'],
['auto_ip', 'floating_ip_pools'],
['floating_ips', 'floating_ip_pools'],
['flavor', 'flavor_ram'],
['image', 'boot_volume'],
['boot_from_volume', 'boot_volume'],
['nics', 'network'],
],
required_if=[
('boot_from_volume', True, ['volume_size', 'image']),
],
)
module = AnsibleModule(argument_spec, **module_kwargs)
state = module.params['state']
image = module.params['image']
boot_volume = module.params['boot_volume']
flavor = module.params['flavor']
flavor_ram = module.params['flavor_ram']
if state == 'present':
if not (image or boot_volume):
module.fail_json(
msg="Parameter 'image' or 'boot_volume' is required "
"if state == 'present'"
)
if not flavor and not flavor_ram:
module.fail_json(
msg="Parameter 'flavor' or 'flavor_ram' is required "
"if state == 'present'"
)
sdk, cloud = openstack_cloud_from_module(module)
try:
if state == 'present':
_get_server_state(module, cloud)
_create_server(module, cloud)
elif state == 'absent':
_get_server_state(module, cloud)
_delete_server(module, cloud)
except sdk.exceptions.OpenStackCloudException as e:
module.fail_json(msg=str(e), extra_data=e.extra_data)
if __name__ == '__main__':
main()
| gpl-3.0 |
zero-ui/miniblink49 | v8_5_7/testing/gmock/scripts/gmock_doctor.py | 346 | 24131 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Converts compiler's errors in code using Google Mock to plain English."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import re
import sys
_VERSION = '1.0.3'
_EMAIL = 'googlemock@googlegroups.com'
_COMMON_GMOCK_SYMBOLS = [
# Matchers
'_',
'A',
'AddressSatisfies',
'AllOf',
'An',
'AnyOf',
'ContainerEq',
'Contains',
'ContainsRegex',
'DoubleEq',
'ElementsAre',
'ElementsAreArray',
'EndsWith',
'Eq',
'Field',
'FloatEq',
'Ge',
'Gt',
'HasSubstr',
'IsInitializedProto',
'Le',
'Lt',
'MatcherCast',
'Matches',
'MatchesRegex',
'NanSensitiveDoubleEq',
'NanSensitiveFloatEq',
'Ne',
'Not',
'NotNull',
'Pointee',
'Property',
'Ref',
'ResultOf',
'SafeMatcherCast',
'StartsWith',
'StrCaseEq',
'StrCaseNe',
'StrEq',
'StrNe',
'Truly',
'TypedEq',
'Value',
# Actions
'Assign',
'ByRef',
'DeleteArg',
'DoAll',
'DoDefault',
'IgnoreResult',
'Invoke',
'InvokeArgument',
'InvokeWithoutArgs',
'Return',
'ReturnNew',
'ReturnNull',
'ReturnRef',
'SaveArg',
'SetArgReferee',
'SetArgPointee',
'SetArgumentPointee',
'SetArrayArgument',
'SetErrnoAndReturn',
'Throw',
'WithArg',
'WithArgs',
'WithoutArgs',
# Cardinalities
'AnyNumber',
'AtLeast',
'AtMost',
'Between',
'Exactly',
# Sequences
'InSequence',
'Sequence',
# Misc
'DefaultValue',
'Mock',
]
# Regex for matching source file path and line number in the compiler's errors.
_GCC_FILE_LINE_RE = r'(?P<file>.*):(?P<line>\d+):(\d+:)?\s+'
_CLANG_FILE_LINE_RE = r'(?P<file>.*):(?P<line>\d+):(?P<column>\d+):\s+'
_CLANG_NON_GMOCK_FILE_LINE_RE = (
r'(?P<file>.*[/\\^](?!gmock-)[^/\\]+):(?P<line>\d+):(?P<column>\d+):\s+')
def _FindAllMatches(regex, s):
"""Generates all matches of regex in string s."""
r = re.compile(regex)
return r.finditer(s)
def _GenericDiagnoser(short_name, long_name, diagnoses, msg):
"""Diagnoses the given disease by pattern matching.
Can provide different diagnoses for different patterns.
Args:
short_name: Short name of the disease.
long_name: Long name of the disease.
diagnoses: A list of pairs (regex, pattern for formatting the diagnosis
for matching regex).
msg: Compiler's error messages.
Yields:
Tuples of the form
(short name of disease, long name of disease, diagnosis).
"""
for regex, diagnosis in diagnoses:
if re.search(regex, msg):
diagnosis = '%(file)s:%(line)s:' + diagnosis
for m in _FindAllMatches(regex, msg):
yield (short_name, long_name, diagnosis % m.groupdict())
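# Worked example for _GenericDiagnoser above (hypothetical regex and message,
# not part of the upstream script): with
# diagnoses=[(_GCC_FILE_LINE_RE + r'error: frobnication failed', '\nAdvice.')]
# and a msg containing "foo.cc:42: error: frobnication failed", the named
# groups give {'file': 'foo.cc', 'line': '42'}, so the generator yields
# (short_name, long_name, 'foo.cc:42:\nAdvice.').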
def _NeedToReturnReferenceDiagnoser(msg):
"""Diagnoses the NRR disease, given the error messages by the compiler."""
gcc_regex = (r'In member function \'testing::internal::ReturnAction<R>.*\n'
+ _GCC_FILE_LINE_RE + r'instantiated from here\n'
r'.*gmock-actions\.h.*error: creating array with negative size')
clang_regex = (r'error:.*array.*negative.*\r?\n'
r'(.*\n)*?' +
_CLANG_NON_GMOCK_FILE_LINE_RE +
r'note: in instantiation of function template specialization '
r'\'testing::internal::ReturnAction<(?P<type>.*)>'
r'::operator Action<.*>\' requested here')
clang11_re = (r'use_ReturnRef_instead_of_Return_to_return_a_reference.*'
r'(.*\n)*?' + _CLANG_NON_GMOCK_FILE_LINE_RE)
diagnosis = """
You are using a Return() action in a function that returns a reference to
%(type)s. Please use ReturnRef() instead."""
return _GenericDiagnoser('NRR', 'Need to Return Reference',
[(clang_regex, diagnosis),
(clang11_re, diagnosis % {'type': 'a type'}),
(gcc_regex, diagnosis % {'type': 'a type'})],
msg)
def _NeedToReturnSomethingDiagnoser(msg):
"""Diagnoses the NRS disease, given the error messages by the compiler."""
  gcc_regex = (_GCC_FILE_LINE_RE + r'(instantiated from here\n'
               r'.*gmock.*actions\.h.*error: void value not ignored)'
r'|(error: control reaches end of non-void function)')
clang_regex1 = (_CLANG_FILE_LINE_RE +
r'error: cannot initialize return object '
r'of type \'Result\' \(aka \'(?P<return_type>.*)\'\) '
r'with an rvalue of type \'void\'')
clang_regex2 = (_CLANG_FILE_LINE_RE +
r'error: cannot initialize return object '
r'of type \'(?P<return_type>.*)\' '
r'with an rvalue of type \'void\'')
diagnosis = """
You are using an action that returns void, but it needs to return
%(return_type)s. Please tell it *what* to return. Perhaps you can use
the pattern DoAll(some_action, Return(some_value))?"""
return _GenericDiagnoser(
'NRS',
'Need to Return Something',
[(gcc_regex, diagnosis % {'return_type': '*something*'}),
(clang_regex1, diagnosis),
(clang_regex2, diagnosis)],
msg)
def _NeedToReturnNothingDiagnoser(msg):
"""Diagnoses the NRN disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'instantiated from here\n'
r'.*gmock-actions\.h.*error: instantiation of '
r'\'testing::internal::ReturnAction<R>::Impl<F>::value_\' '
r'as type \'void\'')
clang_regex1 = (r'error: field has incomplete type '
r'\'Result\' \(aka \'void\'\)(\r)?\n'
r'(.*\n)*?' +
_CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
r'of function template specialization '
r'\'testing::internal::ReturnAction<(?P<return_type>.*)>'
r'::operator Action<void \(.*\)>\' requested here')
clang_regex2 = (r'error: field has incomplete type '
r'\'Result\' \(aka \'void\'\)(\r)?\n'
r'(.*\n)*?' +
_CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
r'of function template specialization '
r'\'testing::internal::DoBothAction<.*>'
r'::operator Action<(?P<return_type>.*) \(.*\)>\' '
r'requested here')
diagnosis = """
You are using an action that returns %(return_type)s, but it needs to return
void. Please use a void-returning action instead.
All actions but the last in DoAll(...) must return void. Perhaps you need
to re-arrange the order of actions in a DoAll(), if you are using one?"""
return _GenericDiagnoser(
'NRN',
'Need to Return Nothing',
[(gcc_regex, diagnosis % {'return_type': '*something*'}),
(clang_regex1, diagnosis),
(clang_regex2, diagnosis)],
msg)
def _IncompleteByReferenceArgumentDiagnoser(msg):
"""Diagnoses the IBRA disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'instantiated from here\n'
r'.*gtest-printers\.h.*error: invalid application of '
r'\'sizeof\' to incomplete type \'(?P<type>.*)\'')
clang_regex = (r'.*gtest-printers\.h.*error: invalid application of '
r'\'sizeof\' to an incomplete type '
r'\'(?P<type>.*)( const)?\'\r?\n'
r'(.*\n)*?' +
_CLANG_NON_GMOCK_FILE_LINE_RE +
r'note: in instantiation of member function '
r'\'testing::internal2::TypeWithoutFormatter<.*>::'
r'PrintValue\' requested here')
diagnosis = """
In order to mock this function, Google Mock needs to see the definition
of type "%(type)s" - declaration alone is not enough. Either #include
the header that defines it, or change the argument to be passed
by pointer."""
return _GenericDiagnoser('IBRA', 'Incomplete By-Reference Argument Type',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis)],
msg)
def _OverloadedFunctionMatcherDiagnoser(msg):
"""Diagnoses the OFM disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for '
r'call to \'Truly\(<unresolved overloaded function type>\)')
clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching function for '
r'call to \'Truly')
diagnosis = """
The argument you gave to Truly() is an overloaded function. Please tell
your compiler which overloaded version you want to use.
For example, if you want to use the version whose signature is
bool Foo(int n);
you should write
Truly(static_cast<bool (*)(int n)>(Foo))"""
return _GenericDiagnoser('OFM', 'Overloaded Function Matcher',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis)],
msg)
def _OverloadedFunctionActionDiagnoser(msg):
"""Diagnoses the OFA disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for call to '
r'\'Invoke\(<unresolved overloaded function type>')
clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching '
r'function for call to \'Invoke\'\r?\n'
r'(.*\n)*?'
r'.*\bgmock-generated-actions\.h:\d+:\d+:\s+'
r'note: candidate template ignored:\s+'
r'couldn\'t infer template argument \'FunctionImpl\'')
diagnosis = """
The function you are passing to Invoke is overloaded. Please tell your compiler
which overloaded version you want to use.
For example, if you want to use the version whose signature is
bool MyFunction(int n, double x);
you should write something like
Invoke(static_cast<bool (*)(int n, double x)>(MyFunction))"""
return _GenericDiagnoser('OFA', 'Overloaded Function Action',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis)],
msg)
def _OverloadedMethodActionDiagnoser(msg):
"""Diagnoses the OMA disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for '
r'call to \'Invoke\(.+, <unresolved overloaded function '
r'type>\)')
clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching function '
r'for call to \'Invoke\'\r?\n'
r'(.*\n)*?'
r'.*\bgmock-generated-actions\.h:\d+:\d+: '
r'note: candidate function template not viable: '
r'requires .*, but 2 (arguments )?were provided')
diagnosis = """
The second argument you gave to Invoke() is an overloaded method. Please
tell your compiler which overloaded version you want to use.
For example, if you want to use the version whose signature is
class Foo {
...
bool Bar(int n, double x);
};
you should write something like
Invoke(foo, static_cast<bool (Foo::*)(int n, double x)>(&Foo::Bar))"""
return _GenericDiagnoser('OMA', 'Overloaded Method Action',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis)],
msg)
def _MockObjectPointerDiagnoser(msg):
"""Diagnoses the MOP disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'error: request for member '
r'\'gmock_(?P<method>.+)\' in \'(?P<mock_object>.+)\', '
r'which is of non-class type \'(.*::)*(?P<class_name>.+)\*\'')
clang_regex = (_CLANG_FILE_LINE_RE + r'error: member reference type '
r'\'(?P<class_name>.*?) *\' is a pointer; '
r'(did you mean|maybe you meant) to use \'->\'\?')
diagnosis = """
The first argument to ON_CALL() and EXPECT_CALL() must be a mock *object*,
not a *pointer* to it. Please write '*(%(mock_object)s)' instead of
'%(mock_object)s' as your first argument.
For example, given the mock class:
class %(class_name)s : public ... {
...
MOCK_METHOD0(%(method)s, ...);
};
and the following mock instance:
%(class_name)s* mock_ptr = ...
you should use the EXPECT_CALL like this:
EXPECT_CALL(*mock_ptr, %(method)s(...));"""
return _GenericDiagnoser(
'MOP',
'Mock Object Pointer',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis % {'mock_object': 'mock_object',
'method': 'method',
'class_name': '%(class_name)s'})],
msg)
def _NeedToUseSymbolDiagnoser(msg):
"""Diagnoses the NUS disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'error: \'(?P<symbol>.+)\' '
r'(was not declared in this scope|has not been declared)')
clang_regex = (_CLANG_FILE_LINE_RE +
r'error: (use of undeclared identifier|unknown type name|'
r'no template named) \'(?P<symbol>[^\']+)\'')
diagnosis = """
'%(symbol)s' is defined by Google Mock in the testing namespace.
Did you forget to write
using testing::%(symbol)s;
?"""
for m in (list(_FindAllMatches(gcc_regex, msg)) +
list(_FindAllMatches(clang_regex, msg))):
symbol = m.groupdict()['symbol']
if symbol in _COMMON_GMOCK_SYMBOLS:
yield ('NUS', 'Need to Use Symbol', diagnosis % m.groupdict())
def _NeedToUseReturnNullDiagnoser(msg):
"""Diagnoses the NRNULL disease, given the error messages by the compiler."""
gcc_regex = ('instantiated from \'testing::internal::ReturnAction<R>'
'::operator testing::Action<Func>\(\) const.*\n' +
_GCC_FILE_LINE_RE + r'instantiated from here\n'
r'.*error: no matching function for call to \'ImplicitCast_\('
r'(:?long )?int&\)')
clang_regex = (r'\bgmock-actions.h:.* error: no matching function for '
r'call to \'ImplicitCast_\'\r?\n'
r'(.*\n)*?' +
_CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
r'of function template specialization '
r'\'testing::internal::ReturnAction<(int|long)>::operator '
r'Action<(?P<type>.*)\(\)>\' requested here')
diagnosis = """
You are probably calling Return(NULL) and the compiler isn't sure how to turn
NULL into %(type)s. Use ReturnNull() instead.
Note: the line number may be off; please fix all instances of Return(NULL)."""
return _GenericDiagnoser(
'NRNULL', 'Need to use ReturnNull',
[(clang_regex, diagnosis),
(gcc_regex, diagnosis % {'type': 'the right type'})],
msg)
def _TypeInTemplatedBaseDiagnoser(msg):
"""Diagnoses the TTB disease, given the error messages by the compiler."""
# This version works when the type is used as the mock function's return
# type.
gcc_4_3_1_regex_type_in_retval = (
r'In member function \'int .*\n' + _GCC_FILE_LINE_RE +
r'error: a function call cannot appear in a constant-expression')
gcc_4_4_0_regex_type_in_retval = (
      r'error: a function call cannot appear in a constant-expression\n'
+ _GCC_FILE_LINE_RE + r'error: template argument 1 is invalid\n')
# This version works when the type is used as the mock function's sole
# parameter type.
gcc_regex_type_of_sole_param = (
_GCC_FILE_LINE_RE +
r'error: \'(?P<type>.+)\' was not declared in this scope\n'
r'.*error: template argument 1 is invalid\n')
# This version works when the type is used as a parameter of a mock
# function that has multiple parameters.
gcc_regex_type_of_a_param = (
r'error: expected `;\' before \'::\' token\n'
+ _GCC_FILE_LINE_RE +
r'error: \'(?P<type>.+)\' was not declared in this scope\n'
r'.*error: template argument 1 is invalid\n'
r'.*error: \'.+\' was not declared in this scope')
clang_regex_type_of_retval_or_sole_param = (
_CLANG_FILE_LINE_RE +
r'error: use of undeclared identifier \'(?P<type>.*)\'\n'
r'(.*\n)*?'
r'(?P=file):(?P=line):\d+: error: '
r'non-friend class member \'Result\' cannot have a qualified name'
)
clang_regex_type_of_a_param = (
_CLANG_FILE_LINE_RE +
r'error: C\+\+ requires a type specifier for all declarations\n'
r'(.*\n)*?'
r'(?P=file):(?P=line):(?P=column): error: '
r'C\+\+ requires a type specifier for all declarations'
)
clang_regex_unknown_type = (
_CLANG_FILE_LINE_RE +
r'error: unknown type name \'(?P<type>[^\']+)\''
)
diagnosis = """
In a mock class template, types or typedefs defined in the base class
template are *not* automatically visible. This is how C++ works. Before
you can use a type or typedef named %(type)s defined in base class Base<T>, you
need to make it visible. One way to do it is:
typedef typename Base<T>::%(type)s %(type)s;"""
for diag in _GenericDiagnoser(
'TTB', 'Type in Template Base',
[(gcc_4_3_1_regex_type_in_retval, diagnosis % {'type': 'Foo'}),
(gcc_4_4_0_regex_type_in_retval, diagnosis % {'type': 'Foo'}),
(gcc_regex_type_of_sole_param, diagnosis),
(gcc_regex_type_of_a_param, diagnosis),
(clang_regex_type_of_retval_or_sole_param, diagnosis),
(clang_regex_type_of_a_param, diagnosis % {'type': 'Foo'})],
msg):
yield diag
# Avoid overlap with the NUS pattern.
for m in _FindAllMatches(clang_regex_unknown_type, msg):
type_ = m.groupdict()['type']
if type_ not in _COMMON_GMOCK_SYMBOLS:
yield ('TTB', 'Type in Template Base', diagnosis % m.groupdict())
def _WrongMockMethodMacroDiagnoser(msg):
"""Diagnoses the WMM disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE +
r'.*this_method_does_not_take_(?P<wrong_args>\d+)_argument.*\n'
r'.*\n'
r'.*candidates are.*FunctionMocker<[^>]+A(?P<args>\d+)\)>')
clang_regex = (_CLANG_NON_GMOCK_FILE_LINE_RE +
                 r'error:.*array.*negative.*\r?\n'
r'(.*\n)*?'
r'(?P=file):(?P=line):(?P=column): error: too few arguments '
r'to function call, expected (?P<args>\d+), '
r'have (?P<wrong_args>\d+)')
clang11_re = (_CLANG_NON_GMOCK_FILE_LINE_RE +
r'.*this_method_does_not_take_'
r'(?P<wrong_args>\d+)_argument.*')
diagnosis = """
You are using MOCK_METHOD%(wrong_args)s to define a mock method that has
%(args)s arguments. Use MOCK_METHOD%(args)s (or MOCK_CONST_METHOD%(args)s,
MOCK_METHOD%(args)s_T, MOCK_CONST_METHOD%(args)s_T as appropriate) instead."""
return _GenericDiagnoser('WMM', 'Wrong MOCK_METHODn Macro',
[(gcc_regex, diagnosis),
(clang11_re, diagnosis % {'wrong_args': 'm',
'args': 'n'}),
(clang_regex, diagnosis)],
msg)
def _WrongParenPositionDiagnoser(msg):
"""Diagnoses the WPP disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE +
r'error:.*testing::internal::MockSpec<.* has no member named \''
r'(?P<method>\w+)\'')
clang_regex = (_CLANG_NON_GMOCK_FILE_LINE_RE +
r'error: no member named \'(?P<method>\w+)\' in '
r'\'testing::internal::MockSpec<.*>\'')
diagnosis = """
The closing parenthesis of ON_CALL or EXPECT_CALL should be *before*
".%(method)s". For example, you should write:
EXPECT_CALL(my_mock, Foo(_)).%(method)s(...);
instead of:
EXPECT_CALL(my_mock, Foo(_).%(method)s(...));"""
return _GenericDiagnoser('WPP', 'Wrong Parenthesis Position',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis)],
msg)
_DIAGNOSERS = [
_IncompleteByReferenceArgumentDiagnoser,
_MockObjectPointerDiagnoser,
_NeedToReturnNothingDiagnoser,
_NeedToReturnReferenceDiagnoser,
_NeedToReturnSomethingDiagnoser,
_NeedToUseReturnNullDiagnoser,
_NeedToUseSymbolDiagnoser,
_OverloadedFunctionActionDiagnoser,
_OverloadedFunctionMatcherDiagnoser,
_OverloadedMethodActionDiagnoser,
_TypeInTemplatedBaseDiagnoser,
_WrongMockMethodMacroDiagnoser,
_WrongParenPositionDiagnoser,
]
def Diagnose(msg):
"""Generates all possible diagnoses given the compiler error message."""
msg = re.sub(r'\x1b\[[^m]*m', '', msg) # Strips all color formatting.
# Assuming the string is using the UTF-8 encoding, replaces the left and
# the right single quote characters with apostrophes.
msg = re.sub(r'(\xe2\x80\x98|\xe2\x80\x99)', "'", msg)
diagnoses = []
for diagnoser in _DIAGNOSERS:
for diag in diagnoser(msg):
diagnosis = '[%s - %s]\n%s' % diag
if not diagnosis in diagnoses:
diagnoses.append(diagnosis)
return diagnoses
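# Example (hypothetical compiler output): a GCC message such as
#   "foo.cc:12: error: 'Return' was not declared in this scope"
# triggers the NUS diagnoser above, so Diagnose() returns one entry beginning
# with "[NUS - Need to Use Symbol]" that suggests writing
# "using testing::Return;".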
def main():
print ('Google Mock Doctor v%s - '
'diagnoses problems in code using Google Mock.' % _VERSION)
if sys.stdin.isatty():
    print ('Please copy and paste the compiler errors here. Press Ctrl-D when '
'you are done:')
else:
print ('Waiting for compiler errors on stdin . . .')
msg = sys.stdin.read().strip()
diagnoses = Diagnose(msg)
count = len(diagnoses)
if not count:
print ("""
Your compiler complained:
8<------------------------------------------------------------
%s
------------------------------------------------------------>8
Uh-oh, I'm not smart enough to figure out what the problem is. :-(
However...
If you send your source code and the compiler's error messages to
%s, you can be helped and I can get smarter --
win-win for us!""" % (msg, _EMAIL))
else:
print ('------------------------------------------------------------')
    if count > 1:
      print ('Your code appears to have the following %s diseases:' % (count,))
    else:
      print ('Your code appears to have the following disease:')
i = 0
for d in diagnoses:
i += 1
if count > 1:
print ('\n#%s:' % (i,))
print (d)
print ("""
How did I do? If you think I'm wrong or unhelpful, please send your
source code and the compiler's error messages to %s.
Then you can be helped and I can get smarter -- I promise I won't be upset!""" %
_EMAIL)
if __name__ == '__main__':
main()
| gpl-3.0 |
AndrewGrossman/django | django/views/decorators/debug.py | 712 | 2627 | import functools
from django.http import HttpRequest
def sensitive_variables(*variables):
"""
Indicates which variables used in the decorated function are sensitive, so
that those variables can later be treated in a special way, for example
by hiding them when logging unhandled exceptions.
Two forms are accepted:
* with specified variable names:
@sensitive_variables('user', 'password', 'credit_card')
def my_function(user):
password = user.pass_word
credit_card = user.credit_card_number
...
* without any specified variable names, in which case it is assumed that
all variables are considered sensitive:
@sensitive_variables()
def my_function()
...
"""
def decorator(func):
@functools.wraps(func)
def sensitive_variables_wrapper(*func_args, **func_kwargs):
if variables:
sensitive_variables_wrapper.sensitive_variables = variables
else:
sensitive_variables_wrapper.sensitive_variables = '__ALL__'
return func(*func_args, **func_kwargs)
return sensitive_variables_wrapper
return decorator
def sensitive_post_parameters(*parameters):
"""
Indicates which POST parameters used in the decorated view are sensitive,
so that those parameters can later be treated in a special way, for example
by hiding them when logging unhandled exceptions.
Two forms are accepted:
* with specified parameters:
@sensitive_post_parameters('password', 'credit_card')
def my_view(request):
pw = request.POST['password']
cc = request.POST['credit_card']
...
* without any specified parameters, in which case it is assumed that
all parameters are considered sensitive:
@sensitive_post_parameters()
def my_view(request)
...
"""
def decorator(view):
@functools.wraps(view)
def sensitive_post_parameters_wrapper(request, *args, **kwargs):
assert isinstance(request, HttpRequest), (
"sensitive_post_parameters didn't receive an HttpRequest. "
"If you are decorating a classmethod, be sure to use "
"@method_decorator."
)
if parameters:
request.sensitive_post_parameters = parameters
else:
request.sensitive_post_parameters = '__ALL__'
return view(request, *args, **kwargs)
return sensitive_post_parameters_wrapper
return decorator
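# Usage sketch (hypothetical view, not part of this module): the two
# decorators are commonly stacked so that both local variables and POST data
# are cleansed from error reports:
#
#   @sensitive_variables('password')
#   @sensitive_post_parameters('password')
#   def login_view(request):
#       password = request.POST['password']
#       ...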
| bsd-3-clause |
reubano/csvkit | csvkit/convert/__init__.py | 17 | 2264 | #!/usr/bin/env python
import six
from csvkit.convert.csvitself import csv2csv
from csvkit.convert.fixed import fixed2csv
from csvkit.convert.geojs import geojson2csv
from csvkit.convert.js import json2csv
from csvkit.convert.ndjs import ndjson2csv
from csvkit.convert.xls import xls2csv
from csvkit.convert.xlsx import xlsx2csv
SUPPORTED_FORMATS = ['fixed', 'xls', 'xlsx', 'csv', 'json', 'geojson', 'ndjson']
# DBF is supported for Python 2 only
if six.PY2:
from csvkit.convert.dbase import dbf2csv
SUPPORTED_FORMATS.append('dbf')
def convert(f, format, schema=None, key=None, **kwargs):
"""
Convert a file of a specified format to CSV.
"""
if not f:
raise ValueError('f must not be None')
if not format:
raise ValueError('format must not be None')
if format == 'fixed':
if not schema:
raise ValueError('schema must not be null when format is "fixed"')
return fixed2csv(f, schema, **kwargs)
elif format == 'xls':
return xls2csv(f, **kwargs)
elif format == 'xlsx':
return xlsx2csv(f, **kwargs)
elif format == 'json':
return json2csv(f, key, **kwargs)
elif format == 'ndjson':
return ndjson2csv(f, **kwargs)
elif format == 'geojson':
return geojson2csv(f, **kwargs)
elif format == 'csv':
return csv2csv(f, **kwargs)
elif format == 'dbf':
if six.PY3:
raise ValueError('format "dbf" is not supported forthis version of Python.')
return dbf2csv(f, **kwargs)
else:
raise ValueError('format "%s" is not supported' % format)
def guess_format(filename):
"""
Try to guess a file's format based on its extension (or lack thereof).
"""
last_period = filename.rfind('.')
if last_period == -1:
# No extension: assume fixed-width
return 'fixed'
extension = filename[last_period + 1:]
if extension == 'xls':
return extension
elif extension == 'xlsx':
return extension
elif extension in ['json', 'js']:
return 'json'
elif extension == 'csv':
return extension
elif extension == 'fixed':
return extension
elif extension == 'dbf':
return extension
return None
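# Usage sketch (hypothetical file names): guess_format('report.xlsx') returns
# 'xlsx', guess_format('data.txt') returns None (unrecognized extension), and
# guess_format('fixed_width_dump') returns 'fixed' (no extension). A typical
# round trip, assuming the file exists, would be:
#
#   with open('report.xlsx', 'rb') as f:
#       csv_text = convert(f, guess_format('report.xlsx'))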
| mit |
ironbox360/django | tests/gis_tests/geo3d/models.py | 302 | 1294 | from django.utils.encoding import python_2_unicode_compatible
from ..models import models
@python_2_unicode_compatible
class NamedModel(models.Model):
name = models.CharField(max_length=30)
objects = models.GeoManager()
class Meta:
abstract = True
required_db_features = ['gis_enabled']
def __str__(self):
return self.name
class City3D(NamedModel):
point = models.PointField(dim=3)
class Interstate2D(NamedModel):
line = models.LineStringField(srid=4269)
class Interstate3D(NamedModel):
line = models.LineStringField(dim=3, srid=4269)
class InterstateProj2D(NamedModel):
line = models.LineStringField(srid=32140)
class InterstateProj3D(NamedModel):
line = models.LineStringField(dim=3, srid=32140)
class Polygon2D(NamedModel):
poly = models.PolygonField(srid=32140)
class Polygon3D(NamedModel):
poly = models.PolygonField(dim=3, srid=32140)
class SimpleModel(models.Model):
objects = models.GeoManager()
class Meta:
abstract = True
required_db_features = ['gis_enabled']
class Point2D(SimpleModel):
point = models.PointField()
class Point3D(SimpleModel):
point = models.PointField(dim=3)
class MultiPoint3D(SimpleModel):
mpoint = models.MultiPointField(dim=3)
| bsd-3-clause |
darcy0511/Dato-Core | src/unity/python/graphlab/test/test_dataframe.py | 13 | 1711 | '''
Copyright (C) 2015 Dato, Inc.
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the DATO-PYTHON-LICENSE file for details.
'''
import unittest
import pandas
import array
from graphlab.cython.cy_dataframe import _dataframe
from pandas.util.testing import assert_frame_equal
class DataFrameTest(unittest.TestCase):
def test_empty(self):
expected = pandas.DataFrame()
assert_frame_equal(_dataframe(expected), expected)
expected['int'] = []
expected['float'] = []
expected['str'] = []
assert_frame_equal(_dataframe(expected), expected)
def test_simple_dataframe(self):
expected = pandas.DataFrame()
expected['int'] = [i for i in range(10)]
expected['float'] = [float(i) for i in range(10)]
expected['str'] = [str(i) for i in range(10)]
expected['unicode'] = [unicode(i) for i in range(10)]
expected['array'] = [array.array('d', [i]) for i in range(10)]
expected['ls'] = [[str(i)] for i in range(10)]
assert_frame_equal(_dataframe(expected), expected)
def test_sparse_dataframe(self):
expected = pandas.DataFrame()
expected['sparse_int'] = [i if i % 2 == 0 else None for i in range(10)]
expected['sparse_float'] = [float(i) if i % 2 == 1 else None for i in range(10)]
expected['sparse_str'] = [str(i) if i % 3 == 0 else None for i in range(10)]
expected['sparse_array'] = [array.array('d', [i]) if i % 5 == 0 else None for i in range(10)]
expected['sparse_list'] = [[str(i)] if i % 7 == 0 else None for i in range(10)]
assert_frame_equal(_dataframe(expected), expected)
| agpl-3.0 |
andykimpe/chromium-test-npapi | native_client_sdk/src/build_tools/buildbot_run.py | 10 | 4283 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Main entry point for the NaCl SDK buildbot.
The entry point used to be build_sdk.py itself, but we want
to be able to simplify build_sdk (for example separating out
the test code into test_sdk) and change its default behaviour
while being able to separately control excactly what the bots
run.
"""
import buildbot_common
import os
import optparse
import subprocess
import sys
from buildbot_common import Run
from build_paths import SRC_DIR, SDK_SRC_DIR, SCRIPT_DIR
import getos
def StepArmRunHooks():
if getos.GetPlatform() != 'linux':
return
# Run 'gclient runhooks' for arm, as some arm specific tools are only
# installed in that case.
buildbot_common.BuildStep('gclient runhooks for arm')
env = dict(os.environ)
env['GYP_DEFINES'] = 'target_arch=arm'
Run(['gclient', 'runhooks'], env=env, cwd=SDK_SRC_DIR)
def StepRunUnittests():
buildbot_common.BuildStep('Run unittests')
# Our tests shouldn't be using the proxy; they should all be connecting to
# localhost. Some slaves can't route HTTP traffic through the proxy to
# localhost (we get 504 gateway errors), so we clear it here.
env = dict(os.environ)
if 'http_proxy' in env:
del env['http_proxy']
Run([sys.executable, 'test_all.py'], env=env, cwd=SDK_SRC_DIR)
def StepBuildSDK():
is_win = getos.GetPlatform() == 'win'
# Windows has a path length limit of 255 characters, after joining cwd with a
# relative path. Use subst before building to keep the path lengths short.
if is_win:
subst_drive = 'S:'
root_dir = os.path.dirname(SRC_DIR)
new_root_dir = subst_drive + '\\'
subprocess.check_call(['subst', subst_drive, root_dir])
new_script_dir = os.path.join(new_root_dir,
os.path.relpath(SCRIPT_DIR, root_dir))
else:
new_script_dir = SCRIPT_DIR
args = [sys.executable, 'build_sdk.py']
if 'bionic' in os.getenv('BUILDBOT_BUILDERNAME', ''):
args.append('--bionic')
try:
Run(args, cwd=new_script_dir)
finally:
if is_win:
subprocess.check_call(['subst', '/D', subst_drive])
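# Sketch of the subst mapping above (hypothetical checkout root): after
# `subst S: C:\very\long\checkout`, a script located at
# C:\very\long\checkout\src\native_client_sdk\src\build_tools is reached as
# S:\src\native_client_sdk\src\build_tools, keeping the paths joined during
# the build under the 255-character Windows limit; `subst /D S:` removes the
# mapping again in the finally block.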
def StepTestSDK():
cmd = []
if getos.GetPlatform() == 'linux':
    # Run all of test_sdk.py under xvfb-run; its startup time leaves something
# to be desired, so only start it up once.
# We also need to make sure that there are at least 24 bits per pixel.
# https://code.google.com/p/chromium/issues/detail?id=316687
cmd.extend([
'xvfb-run',
'--auto-servernum',
'--server-args', '-screen 0 1024x768x24'
])
cmd.extend([sys.executable, 'test_sdk.py'])
Run(cmd, cwd=SCRIPT_DIR)
def main(args):
# Don't write out .pyc files in the source tree. Without this, incremental
# builds can fail when .py files are moved/deleted, since python could load
# orphaned .pyc files generated by a previous run.
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
parser = optparse.OptionParser(description=__doc__)
parser.add_option('--build-only', action='store_true',
help='Only build the SDK, don\'t build or run tests.')
parser.add_option('--build-properties',
help='JSON properties passed by buildbot. Currently ignored.')
parser.add_option('--factory-properties',
help='JSON properties passed by buildbot. Currently ignored.')
options, args = parser.parse_args(args)
# Skip the testing phase if we are running on a build-only bots.
if not options.build_only:
# Infer build-only from bot name.
# TODO(sbc): Remove this once buildbot script have been updated
# to pass --build-only argument.
if os.getenv('BUILDBOT_BUILDERNAME', '').endswith('build'):
options.build_only = True
# TODO(noelallen): Enable testing on bionic when we have an ARM solution.
if 'bionic' in os.getenv('BUILDBOT_BUILDERNAME', ''):
options.build_only = True
StepArmRunHooks()
StepRunUnittests()
StepBuildSDK()
if not options.build_only:
StepTestSDK()
return 0
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except KeyboardInterrupt:
buildbot_common.ErrorExit('buildbot_run: interrupted')
| bsd-3-clause |
DrOctogon/unwash_ecom | oscar/apps/wishlists/migrations/0001_initial.py | 15 | 17221 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'WishList'
db.create_table('wishlists_wishlist', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('owner', self.gf('django.db.models.fields.related.ForeignKey')(related_name='wishlists', to=orm[AUTH_USER_MODEL])),
('name', self.gf('django.db.models.fields.CharField')(default=u'New Wish List', max_length=255)),
('key', self.gf('django.db.models.fields.CharField')(unique=True, max_length=6, db_index=True)),
('visibility', self.gf('django.db.models.fields.CharField')(default='Private', max_length=20)),
('date_created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal('wishlists', ['WishList'])
# Adding model 'Line'
db.create_table('wishlists_line', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('wishlist', self.gf('django.db.models.fields.related.ForeignKey')(related_name='lines', to=orm['wishlists.WishList'])),
('product', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='wishlists_lines', null=True, on_delete=models.SET_NULL, to=orm['catalogue.Product'])),
('quantity', self.gf('django.db.models.fields.PositiveIntegerField')(default=1)),
('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
))
db.send_create_signal('wishlists', ['Line'])
# Adding unique constraint on 'Line', fields ['wishlist', 'product']
db.create_unique('wishlists_line', ['wishlist_id', 'product_id'])
def backwards(self, orm):
# Removing unique constraint on 'Line', fields ['wishlist', 'product']
db.delete_unique('wishlists_line', ['wishlist_id', 'product_id'])
# Deleting model 'WishList'
db.delete_table('wishlists_wishlist')
# Deleting model 'Line'
db.delete_table('wishlists_line')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
AUTH_USER_MODEL: {
'Meta': {'object_name': AUTH_USER_MODEL_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'wishlists.line': {
'Meta': {'unique_together': "(('wishlist', 'product'),)", 'object_name': 'Line'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'wishlists_lines'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['catalogue.Product']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'wishlist': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': "orm['wishlists.WishList']"})
},
'wishlists.wishlist': {
'Meta': {'ordering': "('owner', 'date_created')", 'object_name': 'WishList'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '6', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "u'New Wish List'", 'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'wishlists'", 'to': "orm['{0}']".format(AUTH_USER_MODEL)}),
'visibility': ('django.db.models.fields.CharField', [], {'default': "'Private'", 'max_length': '20'})
}
}
complete_apps = ['wishlists'] | bsd-3-clause |
batxes/4Cin | SHH_WT_models_highres/SHH_WT_models_highres_final_output_0.1_-0.1_5000/mtx1_models/SHH_WT_models_highres17943.py | 4 | 88227 | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
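# Each repeated block below registers one particle of the model as a sphere
# marker: place_marker takes the (x, y, z) position, an (r, g, b) colour
# triple, and the sphere radius.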
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((2700.19, -336.071, 367.214), (0.7, 0.7, 0.7), 182.271)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((2247.87, -340.378, 514.411), (0.7, 0.7, 0.7), 258.199)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((2050.44, -46.2836, 775.274), (0.7, 0.7, 0.7), 123.897)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((2022.45, -450.205, 829.88), (0.7, 0.7, 0.7), 146.739)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((2040.11, -858.335, 1006.37), (0.7, 0.7, 0.7), 179.098)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((2205.71, -370.054, 1286.72), (0.7, 0.7, 0.7), 148.854)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((2317.22, 21.8313, 1610.77), (0.7, 0.7, 0.7), 196.357)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((1991.23, -355.98, 1808.2), (0.7, 0.7, 0.7), 166.873)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((1733.48, -780.546, 2063.83), (0.7, 0.7, 0.7), 95.4711)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((1972.37, -444.609, 2117.4), (0.7, 0.7, 0.7), 185.401)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((2277.07, -98.7291, 1947.4), (0.7, 0.7, 0.7), 151.984)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((2597.65, 350.58, 1690.86), (0.7, 0.7, 0.7), 185.612)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((2945.15, 364.974, 1465.44), (0.7, 0.7, 0.7), 210.273)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((2929.79, 89.9393, 1404.61), (0.7, 0.7, 0.7), 106.892)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((3270.75, -48.8087, 1243.46), (0.7, 0.7, 0.7), 202.025)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((3607.47, -124.817, 836.952), (0.7, 0.7, 0.7), 192.169)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((3933.92, -107.776, 330.522), (0.7, 0.7, 0.7), 241.11)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((4132.16, 152.686, -50.088), (0.7, 0.7, 0.7), 128.465)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((4205.18, 332.627, -539.447), (0.7, 0.7, 0.7), 217.38)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((4255.55, 175.812, -1205.36), (0.7, 0.7, 0.7), 184.555)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((4094.41, 538.796, -722.1), (0.7, 0.7, 0.7), 140.055)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((4314.19, 596.467, -359.566), (0.7, 0.7, 0.7), 169.708)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((4706.28, 675.898, -210.965), (0.7, 0.7, 0.7), 184.639)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((4711.16, 1014.83, -170.324), (0.7, 0.7, 0.7), 119.286)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((4588.86, 1254.84, -346.372), (0.7, 0.7, 0.7), 147.754)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((4312.65, 1258.59, -449.74), (0.7, 0.7, 0.7), 171.4)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((4199.6, 1165.83, -46.2009), (0.7, 0.7, 0.7), 156.341)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((3740.84, 1290.69, 305.487), (0.7, 0.7, 0.7), 186.501)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((3352.63, 1446.81, 658.049), (0.7, 0.7, 0.7), 308.325)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((3172.73, 1253.26, 1024.03), (0.7, 0.7, 0.7), 138.617)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((3170.3, 1213.29, 1331.74), (0.7, 0.7, 0.7), 130.03)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((3382.72, 1255.68, 1095.32), (0.7, 0.7, 0.7), 156.552)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((3171.25, 1244.13, 879.514), (0.7, 0.7, 0.7), 183.244)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((2987.92, 1255.98, 680.31), (0.7, 0.7, 0.7), 181.382)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((2946.35, 1412.6, 555.042), (0.7, 0.7, 0.7), 101.943)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((2906.48, 1444.47, 201.03), (1, 0.7, 0), 138.913)
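# particle_35 above is colored orange (1, 0.7, 0) rather than the default
# grey, presumably to highlight it in the rendered model.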
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((2339.09, 588.942, 509.948), (0.7, 0.7, 0.7), 221.737)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((1672.98, 114.261, 738.631), (0.7, 0.7, 0.7), 256.38)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((1159.67, 238.99, 1114.13), (0.7, 0.7, 0.7), 221.694)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((1342.56, 665.46, 1618.08), (0.7, 0.7, 0.7), 259.341)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((2013.45, 1036.23, 1866.01), (0.7, 0.7, 0.7), 117.89)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((2757.14, 1395.55, 1659.07), (0.7, 0.7, 0.7), 116.071)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((2807.66, 1728.97, 1297.16), (0.7, 0.7, 0.7), 268.224)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((2490.45, 1779.79, 1314.87), (0.7, 0.7, 0.7), 386.918)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((2061.64, 1613.2, 1737.67), (0.7, 0.7, 0.7), 121.316)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((1614.45, 1673.29, 1751.71), (0.7, 0.7, 0.7), 138.363)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((1941.93, 1764.3, 1127.92), (1, 0.7, 0), 175.207)
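# particle_46 above is likewise highlighted in orange (1, 0.7, 0).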
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((1257.96, 1800.29, 1345.67), (0.7, 0.7, 0.7), 131.468)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((537.751, 1948.71, 1379.82), (0.7, 0.7, 0.7), 287.894)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((948.359, 2284.97, 1452.67), (0.7, 0.7, 0.7), 88.1109)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((1545.46, 2265.61, 1496.94), (0.7, 0.7, 0.7), 145.385)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((1731.09, 2301.67, 1619.82), (0.7, 0.7, 0.7), 155.452)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((1176.52, 2527.43, 1801.31), (0.7, 0.7, 0.7), 145.512)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((739.825, 2689.22, 2014.51), (0.7, 0.7, 0.7), 99.9972)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((390.706, 2779.29, 2270.56), (0.7, 0.7, 0.7), 327.529)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((903.319, 2576.65, 2603.54), (0.7, 0.7, 0.7), 137.983)
if "particle_56 geometry" not in marker_sets:
s=new_marker_set('particle_56 geometry')
marker_sets["particle_56 geometry"]=s
s= marker_sets["particle_56 geometry"]
mark=s.place_marker((1394.53, 2558.31, 2471.43), (0.7, 0.7, 0.7), 83.3733)
if "particle_57 geometry" not in marker_sets:
s=new_marker_set('particle_57 geometry')
marker_sets["particle_57 geometry"]=s
s= marker_sets["particle_57 geometry"]
mark=s.place_marker((1903.25, 2521.26, 2213.76), (0.7, 0.7, 0.7), 101.562)
if "particle_58 geometry" not in marker_sets:
s=new_marker_set('particle_58 geometry')
marker_sets["particle_58 geometry"]=s
s= marker_sets["particle_58 geometry"]
mark=s.place_marker((2299.34, 2409.78, 1885.52), (0.7, 0.7, 0.7), 165.689)
if "particle_59 geometry" not in marker_sets:
s=new_marker_set('particle_59 geometry')
marker_sets["particle_59 geometry"]=s
s= marker_sets["particle_59 geometry"]
mark=s.place_marker((2173.58, 2510.37, 1617.24), (0.7, 0.7, 0.7), 136.925)
if "particle_60 geometry" not in marker_sets:
s=new_marker_set('particle_60 geometry')
marker_sets["particle_60 geometry"]=s
s= marker_sets["particle_60 geometry"]
mark=s.place_marker((2094.7, 2555.24, 1526.94), (0.7, 0.7, 0.7), 123.389)
if "particle_61 geometry" not in marker_sets:
s=new_marker_set('particle_61 geometry')
marker_sets["particle_61 geometry"]=s
s= marker_sets["particle_61 geometry"]
mark=s.place_marker((1873.53, 2775.59, 1849.11), (0.7, 0.7, 0.7), 184.47)
if "particle_62 geometry" not in marker_sets:
s=new_marker_set('particle_62 geometry')
marker_sets["particle_62 geometry"]=s
s= marker_sets["particle_62 geometry"]
mark=s.place_marker((1486.72, 3277.35, 2331.99), (0.7, 0.7, 0.7), 148.473)
if "particle_63 geometry" not in marker_sets:
s=new_marker_set('particle_63 geometry')
marker_sets["particle_63 geometry"]=s
s= marker_sets["particle_63 geometry"]
mark=s.place_marker((976.991, 3944.43, 2844.55), (0.7, 0.7, 0.7), 241.406)
if "particle_64 geometry" not in marker_sets:
s=new_marker_set('particle_64 geometry')
marker_sets["particle_64 geometry"]=s
s= marker_sets["particle_64 geometry"]
mark=s.place_marker((1174.47, 3320.41, 2922.82), (0.7, 0.7, 0.7), 182.736)
if "particle_65 geometry" not in marker_sets:
s=new_marker_set('particle_65 geometry')
marker_sets["particle_65 geometry"]=s
s= marker_sets["particle_65 geometry"]
mark=s.place_marker((1275.55, 2891.98, 2776.01), (0.7, 0.7, 0.7), 166.62)
if "particle_66 geometry" not in marker_sets:
s=new_marker_set('particle_66 geometry')
marker_sets["particle_66 geometry"]=s
s= marker_sets["particle_66 geometry"]
mark=s.place_marker((1341.18, 2913.9, 2497.99), (0.7, 0.7, 0.7), 113.872)
if "particle_67 geometry" not in marker_sets:
s=new_marker_set('particle_67 geometry')
marker_sets["particle_67 geometry"]=s
s= marker_sets["particle_67 geometry"]
mark=s.place_marker((1479.79, 2743.57, 2266.23), (0.7, 0.7, 0.7), 110.065)
if "particle_68 geometry" not in marker_sets:
s=new_marker_set('particle_68 geometry')
marker_sets["particle_68 geometry"]=s
s= marker_sets["particle_68 geometry"]
mark=s.place_marker((1447.87, 2603.51, 1912.55), (0.7, 0.7, 0.7), 150.08)
if "particle_69 geometry" not in marker_sets:
s=new_marker_set('particle_69 geometry')
marker_sets["particle_69 geometry"]=s
s= marker_sets["particle_69 geometry"]
mark=s.place_marker((1295.88, 2479.27, 1479.62), (0.7, 0.7, 0.7), 118.525)
if "particle_70 geometry" not in marker_sets:
s=new_marker_set('particle_70 geometry')
marker_sets["particle_70 geometry"]=s
s= marker_sets["particle_70 geometry"]
mark=s.place_marker((1064.46, 2518.2, 990.057), (0.7, 0.7, 0.7), 163.955)
if "particle_71 geometry" not in marker_sets:
s=new_marker_set('particle_71 geometry')
marker_sets["particle_71 geometry"]=s
s= marker_sets["particle_71 geometry"]
mark=s.place_marker((771.098, 2727.8, 916.927), (0.7, 0.7, 0.7), 170.131)
if "particle_72 geometry" not in marker_sets:
s=new_marker_set('particle_72 geometry')
marker_sets["particle_72 geometry"]=s
s= marker_sets["particle_72 geometry"]
mark=s.place_marker((634.108, 3012.84, 1598.44), (0.7, 0.7, 0.7), 78.2127)
if "particle_73 geometry" not in marker_sets:
s=new_marker_set('particle_73 geometry')
marker_sets["particle_73 geometry"]=s
s= marker_sets["particle_73 geometry"]
mark=s.place_marker((528.391, 3217.49, 2394.2), (0.7, 0.7, 0.7), 251.896)
if "particle_74 geometry" not in marker_sets:
s=new_marker_set('particle_74 geometry')
marker_sets["particle_74 geometry"]=s
s= marker_sets["particle_74 geometry"]
mark=s.place_marker((521.396, 3196.09, 3072.11), (0.7, 0.7, 0.7), 167.55)
if "particle_75 geometry" not in marker_sets:
s=new_marker_set('particle_75 geometry')
marker_sets["particle_75 geometry"]=s
s= marker_sets["particle_75 geometry"]
mark=s.place_marker((644.837, 3020.14, 3430.61), (0.7, 0.7, 0.7), 167.846)
if "particle_76 geometry" not in marker_sets:
s=new_marker_set('particle_76 geometry')
marker_sets["particle_76 geometry"]=s
s= marker_sets["particle_76 geometry"]
mark=s.place_marker((699.94, 3513.27, 3332.15), (0.7, 0.7, 0.7), 259.68)
if "particle_77 geometry" not in marker_sets:
s=new_marker_set('particle_77 geometry')
marker_sets["particle_77 geometry"]=s
s= marker_sets["particle_77 geometry"]
mark=s.place_marker((527.411, 3691.85, 2947.15), (0.7, 0.7, 0.7), 80.2854)
if "particle_78 geometry" not in marker_sets:
s=new_marker_set('particle_78 geometry')
marker_sets["particle_78 geometry"]=s
s= marker_sets["particle_78 geometry"]
mark=s.place_marker((392.897, 3637.73, 3100.86), (0.7, 0.7, 0.7), 82.4427)
if "particle_79 geometry" not in marker_sets:
s=new_marker_set('particle_79 geometry')
marker_sets["particle_79 geometry"]=s
s= marker_sets["particle_79 geometry"]
mark=s.place_marker((339.018, 3851.69, 3386.97), (0.7, 0.7, 0.7), 212.811)
if "particle_80 geometry" not in marker_sets:
s=new_marker_set('particle_80 geometry')
marker_sets["particle_80 geometry"]=s
s= marker_sets["particle_80 geometry"]
mark=s.place_marker((983.097, 4056.88, 3489.58), (0.7, 0.7, 0.7), 176.391)
if "particle_81 geometry" not in marker_sets:
s=new_marker_set('particle_81 geometry')
marker_sets["particle_81 geometry"]=s
s= marker_sets["particle_81 geometry"]
mark=s.place_marker((1563.42, 3669.46, 3255.36), (0.7, 0.7, 0.7), 99.3204)
if "particle_82 geometry" not in marker_sets:
s=new_marker_set('particle_82 geometry')
marker_sets["particle_82 geometry"]=s
s= marker_sets["particle_82 geometry"]
mark=s.place_marker((1835.7, 3352.13, 2803.52), (0.7, 0.7, 0.7), 166.62)
if "particle_83 geometry" not in marker_sets:
s=new_marker_set('particle_83 geometry')
marker_sets["particle_83 geometry"]=s
s= marker_sets["particle_83 geometry"]
mark=s.place_marker((1954.24, 3447.2, 2536.95), (0.7, 0.7, 0.7), 102.831)
if "particle_84 geometry" not in marker_sets:
s=new_marker_set('particle_84 geometry')
marker_sets["particle_84 geometry"]=s
s= marker_sets["particle_84 geometry"]
mark=s.place_marker((1538.75, 4072.52, 3088.89), (0.7, 0.7, 0.7), 65.0997)
if "particle_85 geometry" not in marker_sets:
s=new_marker_set('particle_85 geometry')
marker_sets["particle_85 geometry"]=s
s= marker_sets["particle_85 geometry"]
mark=s.place_marker((1286.28, 3637.44, 3096.98), (0.7, 0.7, 0.7), 92.1294)
if "particle_86 geometry" not in marker_sets:
s=new_marker_set('particle_86 geometry')
marker_sets["particle_86 geometry"]=s
s= marker_sets["particle_86 geometry"]
mark=s.place_marker((1263.9, 3114.04, 2964.49), (0.7, 0.7, 0.7), 194.791)
if "particle_87 geometry" not in marker_sets:
s=new_marker_set('particle_87 geometry')
marker_sets["particle_87 geometry"]=s
s= marker_sets["particle_87 geometry"]
mark=s.place_marker((1234.1, 2731.77, 2994.66), (0.7, 0.7, 0.7), 120.766)
if "particle_88 geometry" not in marker_sets:
s=new_marker_set('particle_88 geometry')
marker_sets["particle_88 geometry"]=s
s= marker_sets["particle_88 geometry"]
mark=s.place_marker((990.259, 3000.32, 3429.98), (0.7, 0.7, 0.7), 217.803)
if "particle_89 geometry" not in marker_sets:
s=new_marker_set('particle_89 geometry')
marker_sets["particle_89 geometry"]=s
s= marker_sets["particle_89 geometry"]
mark=s.place_marker((903.709, 3280.19, 3132.4), (0.7, 0.7, 0.7), 115.775)
if "particle_90 geometry" not in marker_sets:
s=new_marker_set('particle_90 geometry')
marker_sets["particle_90 geometry"]=s
s= marker_sets["particle_90 geometry"]
mark=s.place_marker((836.121, 3238.3, 2721.63), (0.7, 0.7, 0.7), 115.648)
if "particle_91 geometry" not in marker_sets:
s=new_marker_set('particle_91 geometry')
marker_sets["particle_91 geometry"]=s
s= marker_sets["particle_91 geometry"]
mark=s.place_marker((1111.23, 3070.23, 2634.69), (0.7, 0.7, 0.7), 83.8386)
if "particle_92 geometry" not in marker_sets:
s=new_marker_set('particle_92 geometry')
marker_sets["particle_92 geometry"]=s
s= marker_sets["particle_92 geometry"]
mark=s.place_marker((1204.54, 2844.8, 2905.17), (0.7, 0.7, 0.7), 124.32)
if "particle_93 geometry" not in marker_sets:
s=new_marker_set('particle_93 geometry')
marker_sets["particle_93 geometry"]=s
s= marker_sets["particle_93 geometry"]
mark=s.place_marker((1174.83, 2468.85, 3101.4), (0.7, 0.7, 0.7), 185.993)
if "particle_94 geometry" not in marker_sets:
s=new_marker_set('particle_94 geometry')
marker_sets["particle_94 geometry"]=s
s= marker_sets["particle_94 geometry"]
mark=s.place_marker((676.32, 2162.31, 3247.42), (0.7, 0.7, 0.7), 238.826)
if "particle_95 geometry" not in marker_sets:
s=new_marker_set('particle_95 geometry')
marker_sets["particle_95 geometry"]=s
s= marker_sets["particle_95 geometry"]
mark=s.place_marker((143.246, 2198.72, 3149.47), (0.7, 0.7, 0.7), 128.465)
if "particle_96 geometry" not in marker_sets:
s=new_marker_set('particle_96 geometry')
marker_sets["particle_96 geometry"]=s
s= marker_sets["particle_96 geometry"]
mark=s.place_marker((299.141, 2602.97, 2677), (0.7, 0.7, 0.7), 203.209)
if "particle_97 geometry" not in marker_sets:
s=new_marker_set('particle_97 geometry')
marker_sets["particle_97 geometry"]=s
s= marker_sets["particle_97 geometry"]
mark=s.place_marker((747.715, 2822.93, 2590.51), (0.7, 0.7, 0.7), 160.486)
if "particle_98 geometry" not in marker_sets:
s=new_marker_set('particle_98 geometry')
marker_sets["particle_98 geometry"]=s
s= marker_sets["particle_98 geometry"]
mark=s.place_marker((832.445, 2764.16, 2918.69), (0.7, 0.7, 0.7), 149.277)
if "particle_99 geometry" not in marker_sets:
s=new_marker_set('particle_99 geometry')
marker_sets["particle_99 geometry"]=s
s= marker_sets["particle_99 geometry"]
mark=s.place_marker((382.589, 2979.61, 3138.86), (0.7, 0.7, 0.7), 35.7435)
if "particle_100 geometry" not in marker_sets:
s=new_marker_set('particle_100 geometry')
marker_sets["particle_100 geometry"]=s
s= marker_sets["particle_100 geometry"]
mark=s.place_marker((1056.32, 3049.81, 2415.83), (0.7, 0.7, 0.7), 98.3898)
if "particle_101 geometry" not in marker_sets:
s=new_marker_set('particle_101 geometry')
marker_sets["particle_101 geometry"]=s
s= marker_sets["particle_101 geometry"]
mark=s.place_marker((1915.07, 3000.12, 1821.55), (0.7, 0.7, 0.7), 188.404)
if "particle_102 geometry" not in marker_sets:
s=new_marker_set('particle_102 geometry')
marker_sets["particle_102 geometry"]=s
s= marker_sets["particle_102 geometry"]
mark=s.place_marker((2406.63, 2873.91, 1883.44), (0.7, 0.7, 0.7), 110.318)
if "particle_103 geometry" not in marker_sets:
s=new_marker_set('particle_103 geometry')
marker_sets["particle_103 geometry"]=s
s= marker_sets["particle_103 geometry"]
mark=s.place_marker((2237.62, 3063.42, 2172.72), (0.7, 0.7, 0.7), 127.534)
if "particle_104 geometry" not in marker_sets:
s=new_marker_set('particle_104 geometry')
marker_sets["particle_104 geometry"]=s
s= marker_sets["particle_104 geometry"]
mark=s.place_marker((1928.38, 3160.72, 2372.54), (0.7, 0.7, 0.7), 91.368)
if "particle_105 geometry" not in marker_sets:
s=new_marker_set('particle_105 geometry')
marker_sets["particle_105 geometry"]=s
s= marker_sets["particle_105 geometry"]
mark=s.place_marker((1560.33, 3160.86, 2519.28), (0.7, 0.7, 0.7), 131.045)
if "particle_106 geometry" not in marker_sets:
s=new_marker_set('particle_106 geometry')
marker_sets["particle_106 geometry"]=s
s= marker_sets["particle_106 geometry"]
mark=s.place_marker((1179.35, 2987.66, 2537.76), (0.7, 0.7, 0.7), 143.608)
if "particle_107 geometry" not in marker_sets:
s=new_marker_set('particle_107 geometry')
marker_sets["particle_107 geometry"]=s
s= marker_sets["particle_107 geometry"]
mark=s.place_marker((1062.75, 2722.47, 2797.19), (0.7, 0.7, 0.7), 135.783)
if "particle_108 geometry" not in marker_sets:
s=new_marker_set('particle_108 geometry')
marker_sets["particle_108 geometry"]=s
s= marker_sets["particle_108 geometry"]
mark=s.place_marker((1005.27, 2505.14, 3042.03), (0.7, 0.7, 0.7), 92.5947)
if "particle_109 geometry" not in marker_sets:
s=new_marker_set('particle_109 geometry')
marker_sets["particle_109 geometry"]=s
s= marker_sets["particle_109 geometry"]
mark=s.place_marker((1267.86, 2495.15, 3121.58), (0.7, 0.7, 0.7), 150.123)
if "particle_110 geometry" not in marker_sets:
s=new_marker_set('particle_110 geometry')
marker_sets["particle_110 geometry"]=s
s= marker_sets["particle_110 geometry"]
mark=s.place_marker((1451.42, 2527.13, 3153.91), (0.7, 0.7, 0.7), 121.57)
if "particle_111 geometry" not in marker_sets:
s=new_marker_set('particle_111 geometry')
marker_sets["particle_111 geometry"]=s
s= marker_sets["particle_111 geometry"]
mark=s.place_marker((1476.78, 2538.53, 3479.94), (0.7, 0.7, 0.7), 104.777)
if "particle_112 geometry" not in marker_sets:
s=new_marker_set('particle_112 geometry')
marker_sets["particle_112 geometry"]=s
s= marker_sets["particle_112 geometry"]
mark=s.place_marker((1845.56, 2477.71, 3316.38), (0.7, 0.7, 0.7), 114.844)
if "particle_113 geometry" not in marker_sets:
s=new_marker_set('particle_113 geometry')
marker_sets["particle_113 geometry"]=s
s= marker_sets["particle_113 geometry"]
mark=s.place_marker((2244.07, 2415.59, 3142.49), (0.7, 0.7, 0.7), 150.588)
if "particle_114 geometry" not in marker_sets:
s=new_marker_set('particle_114 geometry')
marker_sets["particle_114 geometry"]=s
s= marker_sets["particle_114 geometry"]
mark=s.place_marker((2316.29, 2603.7, 2783.75), (0.7, 0.7, 0.7), 103.55)
if "particle_115 geometry" not in marker_sets:
s=new_marker_set('particle_115 geometry')
marker_sets["particle_115 geometry"]=s
s= marker_sets["particle_115 geometry"]
mark=s.place_marker((2332.56, 3093.46, 2554.64), (0.7, 0.7, 0.7), 215.392)
if "particle_116 geometry" not in marker_sets:
s=new_marker_set('particle_116 geometry')
marker_sets["particle_116 geometry"]=s
s= marker_sets["particle_116 geometry"]
mark=s.place_marker((2473.89, 3537.38, 2251.58), (0.7, 0.7, 0.7), 99.9126)
if "particle_117 geometry" not in marker_sets:
s=new_marker_set('particle_117 geometry')
marker_sets["particle_117 geometry"]=s
s= marker_sets["particle_117 geometry"]
mark=s.place_marker((2469.21, 4236.05, 2411.21), (0.7, 0.7, 0.7), 99.7857)
if "particle_118 geometry" not in marker_sets:
s=new_marker_set('particle_118 geometry')
marker_sets["particle_118 geometry"]=s
s= marker_sets["particle_118 geometry"]
mark=s.place_marker((2251.53, 4736.45, 2531.97), (0.7, 0.7, 0.7), 109.98)
if "particle_119 geometry" not in marker_sets:
s=new_marker_set('particle_119 geometry')
marker_sets["particle_119 geometry"]=s
s= marker_sets["particle_119 geometry"]
mark=s.place_marker((2368.29, 4260.2, 2693.66), (0.7, 0.7, 0.7), 102.831)
if "particle_120 geometry" not in marker_sets:
s=new_marker_set('particle_120 geometry')
marker_sets["particle_120 geometry"]=s
s= marker_sets["particle_120 geometry"]
mark=s.place_marker((2275.13, 3871.53, 2684.5), (0.7, 0.7, 0.7), 103.593)
if "particle_121 geometry" not in marker_sets:
s=new_marker_set('particle_121 geometry')
marker_sets["particle_121 geometry"]=s
s= marker_sets["particle_121 geometry"]
mark=s.place_marker((2020.08, 3462.47, 2716.05), (0.7, 0.7, 0.7), 173.472)
if "particle_122 geometry" not in marker_sets:
s=new_marker_set('particle_122 geometry')
marker_sets["particle_122 geometry"]=s
s= marker_sets["particle_122 geometry"]
mark=s.place_marker((1500.36, 3362.85, 2885.94), (0.7, 0.7, 0.7), 113.575)
if "particle_123 geometry" not in marker_sets:
s=new_marker_set('particle_123 geometry')
marker_sets["particle_123 geometry"]=s
s= marker_sets["particle_123 geometry"]
mark=s.place_marker((1259.77, 2968.56, 2972.12), (0.7, 0.7, 0.7), 128.296)
if "particle_124 geometry" not in marker_sets:
s=new_marker_set('particle_124 geometry')
marker_sets["particle_124 geometry"]=s
s= marker_sets["particle_124 geometry"]
mark=s.place_marker((1053.81, 2609.97, 3144.2), (0.7, 0.7, 0.7), 145.004)
if "particle_125 geometry" not in marker_sets:
s=new_marker_set('particle_125 geometry')
marker_sets["particle_125 geometry"]=s
s= marker_sets["particle_125 geometry"]
mark=s.place_marker((999.094, 2108.74, 3270.01), (0.7, 0.7, 0.7), 148.261)
if "particle_126 geometry" not in marker_sets:
s=new_marker_set('particle_126 geometry')
marker_sets["particle_126 geometry"]=s
s= marker_sets["particle_126 geometry"]
mark=s.place_marker((612.645, 1692.22, 3524.53), (0.7, 0.7, 0.7), 127.704)
if "particle_127 geometry" not in marker_sets:
s=new_marker_set('particle_127 geometry')
marker_sets["particle_127 geometry"]=s
s= marker_sets["particle_127 geometry"]
mark=s.place_marker((116.758, 1432.4, 3645.6), (0.7, 0.7, 0.7), 129.607)
if "particle_128 geometry" not in marker_sets:
s=new_marker_set('particle_128 geometry')
marker_sets["particle_128 geometry"]=s
s= marker_sets["particle_128 geometry"]
mark=s.place_marker((76.0674, 1873.85, 3398.65), (0.7, 0.7, 0.7), 139.759)
if "particle_129 geometry" not in marker_sets:
s=new_marker_set('particle_129 geometry')
marker_sets["particle_129 geometry"]=s
s= marker_sets["particle_129 geometry"]
mark=s.place_marker((298.817, 2372.06, 2990.11), (0.7, 0.7, 0.7), 118.567)
if "particle_130 geometry" not in marker_sets:
s=new_marker_set('particle_130 geometry')
marker_sets["particle_130 geometry"]=s
s= marker_sets["particle_130 geometry"]
mark=s.place_marker((545.811, 2748.47, 3070.03), (0.7, 0.7, 0.7), 136.164)
if "particle_131 geometry" not in marker_sets:
s=new_marker_set('particle_131 geometry')
marker_sets["particle_131 geometry"]=s
s= marker_sets["particle_131 geometry"]
mark=s.place_marker((950.358, 2989.57, 3069), (0.7, 0.7, 0.7), 121.655)
if "particle_132 geometry" not in marker_sets:
s=new_marker_set('particle_132 geometry')
marker_sets["particle_132 geometry"]=s
s= marker_sets["particle_132 geometry"]
mark=s.place_marker((1357.78, 3117.15, 3180.76), (0.7, 0.7, 0.7), 127.492)
if "particle_133 geometry" not in marker_sets:
s=new_marker_set('particle_133 geometry')
marker_sets["particle_133 geometry"]=s
s= marker_sets["particle_133 geometry"]
mark=s.place_marker((1544.04, 3373.58, 3486.51), (0.7, 0.7, 0.7), 138.617)
if "particle_134 geometry" not in marker_sets:
s=new_marker_set('particle_134 geometry')
marker_sets["particle_134 geometry"]=s
s= marker_sets["particle_134 geometry"]
mark=s.place_marker((1924.96, 3402.21, 3524.55), (0.7, 0.7, 0.7), 120.766)
if "particle_135 geometry" not in marker_sets:
s=new_marker_set('particle_135 geometry')
marker_sets["particle_135 geometry"]=s
s= marker_sets["particle_135 geometry"]
mark=s.place_marker((2181.47, 3495.47, 3317.95), (0.7, 0.7, 0.7), 145.893)
if "particle_136 geometry" not in marker_sets:
s=new_marker_set('particle_136 geometry')
marker_sets["particle_136 geometry"]=s
s= marker_sets["particle_136 geometry"]
mark=s.place_marker((2155.08, 3330.92, 2866.17), (0.7, 0.7, 0.7), 185.02)
if "particle_137 geometry" not in marker_sets:
s=new_marker_set('particle_137 geometry')
marker_sets["particle_137 geometry"]=s
s= marker_sets["particle_137 geometry"]
mark=s.place_marker((2303.98, 3032.53, 2451.49), (0.7, 0.7, 0.7), 221.314)
if "particle_138 geometry" not in marker_sets:
s=new_marker_set('particle_138 geometry')
marker_sets["particle_138 geometry"]=s
s= marker_sets["particle_138 geometry"]
mark=s.place_marker((2492.22, 2616.6, 2275.78), (0.7, 0.7, 0.7), 165.139)
if "particle_139 geometry" not in marker_sets:
s=new_marker_set('particle_139 geometry')
marker_sets["particle_139 geometry"]=s
s= marker_sets["particle_139 geometry"]
mark=s.place_marker((2352.93, 2699.76, 2131.67), (0.7, 0.7, 0.7), 179.437)
if "particle_140 geometry" not in marker_sets:
s=new_marker_set('particle_140 geometry')
marker_sets["particle_140 geometry"]=s
s= marker_sets["particle_140 geometry"]
mark=s.place_marker((2306.26, 3109.75, 2101.54), (0.7, 0.7, 0.7), 137.898)
if "particle_141 geometry" not in marker_sets:
s=new_marker_set('particle_141 geometry')
marker_sets["particle_141 geometry"]=s
s= marker_sets["particle_141 geometry"]
mark=s.place_marker((2226.46, 3400.74, 2236.07), (0.7, 0.7, 0.7), 124.658)
if "particle_142 geometry" not in marker_sets:
s=new_marker_set('particle_142 geometry')
marker_sets["particle_142 geometry"]=s
s= marker_sets["particle_142 geometry"]
mark=s.place_marker((2236.56, 3520.48, 2573.84), (0.7, 0.7, 0.7), 97.7553)
if "particle_143 geometry" not in marker_sets:
s=new_marker_set('particle_143 geometry')
marker_sets["particle_143 geometry"]=s
s= marker_sets["particle_143 geometry"]
mark=s.place_marker((2218.79, 3659.43, 2847.55), (0.7, 0.7, 0.7), 92.9331)
if "particle_144 geometry" not in marker_sets:
s=new_marker_set('particle_144 geometry')
marker_sets["particle_144 geometry"]=s
s= marker_sets["particle_144 geometry"]
mark=s.place_marker((2097.18, 3879.61, 3064.03), (0.7, 0.7, 0.7), 123.135)
if "particle_145 geometry" not in marker_sets:
s=new_marker_set('particle_145 geometry')
marker_sets["particle_145 geometry"]=s
s= marker_sets["particle_145 geometry"]
mark=s.place_marker((2157.87, 3637.06, 2760.63), (0.7, 0.7, 0.7), 125.716)
if "particle_146 geometry" not in marker_sets:
s=new_marker_set('particle_146 geometry')
marker_sets["particle_146 geometry"]=s
s= marker_sets["particle_146 geometry"]
mark=s.place_marker((2125.35, 3314.77, 2666.76), (0.7, 0.7, 0.7), 127.534)
if "particle_147 geometry" not in marker_sets:
s=new_marker_set('particle_147 geometry')
marker_sets["particle_147 geometry"]=s
s= marker_sets["particle_147 geometry"]
mark=s.place_marker((1893.61, 3110.36, 2725.09), (0.7, 0.7, 0.7), 94.9212)
if "particle_148 geometry" not in marker_sets:
s=new_marker_set('particle_148 geometry')
marker_sets["particle_148 geometry"]=s
s= marker_sets["particle_148 geometry"]
mark=s.place_marker((1965.8, 2748.72, 2475.26), (0.7, 0.7, 0.7), 137.644)
if "particle_149 geometry" not in marker_sets:
s=new_marker_set('particle_149 geometry')
marker_sets["particle_149 geometry"]=s
s= marker_sets["particle_149 geometry"]
mark=s.place_marker((1990.99, 2398.52, 2378.59), (0.7, 0.7, 0.7), 149.277)
if "particle_150 geometry" not in marker_sets:
s=new_marker_set('particle_150 geometry')
marker_sets["particle_150 geometry"]=s
s= marker_sets["particle_150 geometry"]
mark=s.place_marker((2158.14, 2378.32, 2689.85), (0.7, 0.7, 0.7), 103.677)
if "particle_151 geometry" not in marker_sets:
s=new_marker_set('particle_151 geometry')
marker_sets["particle_151 geometry"]=s
s= marker_sets["particle_151 geometry"]
mark=s.place_marker((2167.18, 2424.1, 3168.76), (0.7, 0.7, 0.7), 99.6588)
if "particle_152 geometry" not in marker_sets:
s=new_marker_set('particle_152 geometry')
marker_sets["particle_152 geometry"]=s
s= marker_sets["particle_152 geometry"]
mark=s.place_marker((2157.97, 2433.14, 3539.86), (0.7, 0.7, 0.7), 134.133)
if "particle_153 geometry" not in marker_sets:
s=new_marker_set('particle_153 geometry')
marker_sets["particle_153 geometry"]=s
s= marker_sets["particle_153 geometry"]
mark=s.place_marker((2298.75, 2645.27, 3311.88), (0.7, 0.7, 0.7), 173.007)
if "particle_154 geometry" not in marker_sets:
s=new_marker_set('particle_154 geometry')
marker_sets["particle_154 geometry"]=s
s= marker_sets["particle_154 geometry"]
mark=s.place_marker((2400.44, 2541.16, 2760.7), (0.7, 0.7, 0.7), 141.028)
if "particle_155 geometry" not in marker_sets:
s=new_marker_set('particle_155 geometry')
marker_sets["particle_155 geometry"]=s
s= marker_sets["particle_155 geometry"]
mark=s.place_marker((2410.32, 2514.43, 2296.58), (0.7, 0.7, 0.7), 161.121)
if "particle_156 geometry" not in marker_sets:
s=new_marker_set('particle_156 geometry')
marker_sets["particle_156 geometry"]=s
s= marker_sets["particle_156 geometry"]
mark=s.place_marker((2083.54, 2500.88, 2177.4), (0.7, 0.7, 0.7), 119.582)
if "particle_157 geometry" not in marker_sets:
s=new_marker_set('particle_157 geometry')
marker_sets["particle_157 geometry"]=s
s= marker_sets["particle_157 geometry"]
mark=s.place_marker((1845.47, 2690.37, 2441.19), (0.7, 0.7, 0.7), 137.094)
if "particle_158 geometry" not in marker_sets:
s=new_marker_set('particle_158 geometry')
marker_sets["particle_158 geometry"]=s
s= marker_sets["particle_158 geometry"]
mark=s.place_marker((1650.27, 3095.59, 2664.15), (0.7, 0.7, 0.7), 149.234)
if "particle_159 geometry" not in marker_sets:
s=new_marker_set('particle_159 geometry')
marker_sets["particle_159 geometry"]=s
s= marker_sets["particle_159 geometry"]
mark=s.place_marker((1872.74, 3377.7, 2407.58), (0.7, 0.7, 0.7), 151.011)
if "particle_160 geometry" not in marker_sets:
s=new_marker_set('particle_160 geometry')
marker_sets["particle_160 geometry"]=s
s= marker_sets["particle_160 geometry"]
mark=s.place_marker((2280.71, 3368.37, 2073.57), (0.7, 0.7, 0.7), 184.216)
if "particle_161 geometry" not in marker_sets:
s=new_marker_set('particle_161 geometry')
marker_sets["particle_161 geometry"]=s
s= marker_sets["particle_161 geometry"]
mark=s.place_marker((2648.43, 3351.46, 2273.16), (0.7, 0.7, 0.7), 170.596)
if "particle_162 geometry" not in marker_sets:
s=new_marker_set('particle_162 geometry')
marker_sets["particle_162 geometry"]=s
s= marker_sets["particle_162 geometry"]
mark=s.place_marker((2496.19, 3754.85, 2743.36), (0.7, 0.7, 0.7), 215.603)
if "particle_163 geometry" not in marker_sets:
s=new_marker_set('particle_163 geometry')
marker_sets["particle_163 geometry"]=s
s= marker_sets["particle_163 geometry"]
mark=s.place_marker((2209.3, 4342.91, 3347.76), (0.7, 0.7, 0.7), 79.0164)
if "particle_164 geometry" not in marker_sets:
s=new_marker_set('particle_164 geometry')
marker_sets["particle_164 geometry"]=s
s= marker_sets["particle_164 geometry"]
mark=s.place_marker((2303.1, 4217.61, 3650.6), (0.7, 0.7, 0.7), 77.2821)
if "particle_165 geometry" not in marker_sets:
s=new_marker_set('particle_165 geometry')
marker_sets["particle_165 geometry"]=s
s= marker_sets["particle_165 geometry"]
mark=s.place_marker((2308.75, 3865.36, 3627.95), (0.7, 0.7, 0.7), 188.658)
if "particle_166 geometry" not in marker_sets:
s=new_marker_set('particle_166 geometry')
marker_sets["particle_166 geometry"]=s
s= marker_sets["particle_166 geometry"]
mark=s.place_marker((2620.87, 3847.94, 3657.25), (0.7, 0.7, 0.7), 115.437)
if "particle_167 geometry" not in marker_sets:
s=new_marker_set('particle_167 geometry')
marker_sets["particle_167 geometry"]=s
s= marker_sets["particle_167 geometry"]
mark=s.place_marker((2578.85, 3536.73, 3145.15), (0.7, 0.7, 0.7), 88.4916)
if "particle_168 geometry" not in marker_sets:
s=new_marker_set('particle_168 geometry')
marker_sets["particle_168 geometry"]=s
s= marker_sets["particle_168 geometry"]
mark=s.place_marker((2520.93, 3215.07, 2604.89), (0.7, 0.7, 0.7), 108.88)
if "particle_169 geometry" not in marker_sets:
s=new_marker_set('particle_169 geometry')
marker_sets["particle_169 geometry"]=s
s= marker_sets["particle_169 geometry"]
mark=s.place_marker((2471.81, 3236.34, 2240.6), (0.7, 0.7, 0.7), 172.119)
if "particle_170 geometry" not in marker_sets:
s=new_marker_set('particle_170 geometry')
marker_sets["particle_170 geometry"]=s
s= marker_sets["particle_170 geometry"]
mark=s.place_marker((2537.8, 3658.88, 2496.64), (0.7, 0.7, 0.7), 139.505)
if "particle_171 geometry" not in marker_sets:
s=new_marker_set('particle_171 geometry')
marker_sets["particle_171 geometry"]=s
s= marker_sets["particle_171 geometry"]
mark=s.place_marker((2588.06, 4097.8, 2771.9), (0.7, 0.7, 0.7), 92.7639)
if "particle_172 geometry" not in marker_sets:
s=new_marker_set('particle_172 geometry')
marker_sets["particle_172 geometry"]=s
s= marker_sets["particle_172 geometry"]
mark=s.place_marker((2622.48, 4202.48, 2522.73), (0.7, 0.7, 0.7), 89.8452)
if "particle_173 geometry" not in marker_sets:
s=new_marker_set('particle_173 geometry')
marker_sets["particle_173 geometry"]=s
s= marker_sets["particle_173 geometry"]
mark=s.place_marker((2861.82, 4054, 2382.47), (0.7, 0.7, 0.7), 149.446)
if "particle_174 geometry" not in marker_sets:
s=new_marker_set('particle_174 geometry')
marker_sets["particle_174 geometry"]=s
s= marker_sets["particle_174 geometry"]
mark=s.place_marker((3135.34, 4121.9, 2540.82), (0.7, 0.7, 0.7), 126.858)
if "particle_175 geometry" not in marker_sets:
s=new_marker_set('particle_175 geometry')
marker_sets["particle_175 geometry"]=s
s= marker_sets["particle_175 geometry"]
mark=s.place_marker((2907.7, 4326.92, 2669.39), (0.7, 0.7, 0.7), 106.046)
if "particle_176 geometry" not in marker_sets:
s=new_marker_set('particle_176 geometry')
marker_sets["particle_176 geometry"]=s
s= marker_sets["particle_176 geometry"]
mark=s.place_marker((2422.11, 4475.45, 2583.54), (0.7, 0.7, 0.7), 156.298)
if "particle_177 geometry" not in marker_sets:
s=new_marker_set('particle_177 geometry')
marker_sets["particle_177 geometry"]=s
s= marker_sets["particle_177 geometry"]
mark=s.place_marker((1827.54, 4578.02, 2545.37), (0.7, 0.7, 0.7), 231.212)
if "particle_178 geometry" not in marker_sets:
s=new_marker_set('particle_178 geometry')
marker_sets["particle_178 geometry"]=s
s= marker_sets["particle_178 geometry"]
mark=s.place_marker((1603.44, 4380.43, 2100.59), (0.7, 0.7, 0.7), 88.4916)
if "particle_179 geometry" not in marker_sets:
s=new_marker_set('particle_179 geometry')
marker_sets["particle_179 geometry"]=s
s= marker_sets["particle_179 geometry"]
mark=s.place_marker((1760.52, 4016.59, 1801.57), (0.7, 0.7, 0.7), 111.334)
if "particle_180 geometry" not in marker_sets:
s=new_marker_set('particle_180 geometry')
marker_sets["particle_180 geometry"]=s
s= marker_sets["particle_180 geometry"]
mark=s.place_marker((2112.39, 3543.97, 1769.24), (0.7, 0.7, 0.7), 127.619)
if "particle_181 geometry" not in marker_sets:
s=new_marker_set('particle_181 geometry')
marker_sets["particle_181 geometry"]=s
s= marker_sets["particle_181 geometry"]
mark=s.place_marker((2255.93, 3140.83, 1869.99), (0.7, 0.7, 0.7), 230.746)
if "particle_182 geometry" not in marker_sets:
s=new_marker_set('particle_182 geometry')
marker_sets["particle_182 geometry"]=s
s= marker_sets["particle_182 geometry"]
mark=s.place_marker((2091.13, 3306.44, 2229.41), (0.7, 0.7, 0.7), 124.573)
if "particle_183 geometry" not in marker_sets:
s=new_marker_set('particle_183 geometry')
marker_sets["particle_183 geometry"]=s
s= marker_sets["particle_183 geometry"]
mark=s.place_marker((1891.97, 3787.55, 2586.69), (0.7, 0.7, 0.7), 124.489)
if "particle_184 geometry" not in marker_sets:
s=new_marker_set('particle_184 geometry')
marker_sets["particle_184 geometry"]=s
s= marker_sets["particle_184 geometry"]
mark=s.place_marker((2040.86, 3857.42, 2935.75), (0.7, 0.7, 0.7), 196.61)
if "particle_185 geometry" not in marker_sets:
s=new_marker_set('particle_185 geometry')
marker_sets["particle_185 geometry"]=s
s= marker_sets["particle_185 geometry"]
mark=s.place_marker((2276.45, 3905.57, 2693.96), (0.7, 0.7, 0.7), 134.049)
if "particle_186 geometry" not in marker_sets:
s=new_marker_set('particle_186 geometry')
marker_sets["particle_186 geometry"]=s
s= marker_sets["particle_186 geometry"]
mark=s.place_marker((2264.23, 4110.76, 2458.89), (0.7, 0.7, 0.7), 141.493)
if "particle_187 geometry" not in marker_sets:
s=new_marker_set('particle_187 geometry')
marker_sets["particle_187 geometry"]=s
s= marker_sets["particle_187 geometry"]
mark=s.place_marker((2144.63, 4484.79, 2381.01), (0.7, 0.7, 0.7), 172.203)
if "particle_188 geometry" not in marker_sets:
s=new_marker_set('particle_188 geometry')
marker_sets["particle_188 geometry"]=s
s= marker_sets["particle_188 geometry"]
mark=s.place_marker((2390.04, 4015.58, 2759.02), (0.7, 0.7, 0.7), 271.354)
if "particle_189 geometry" not in marker_sets:
s=new_marker_set('particle_189 geometry')
marker_sets["particle_189 geometry"]=s
s= marker_sets["particle_189 geometry"]
mark=s.place_marker((2272.08, 3589.26, 3056.3), (0.7, 0.7, 0.7), 97.0785)
if "particle_190 geometry" not in marker_sets:
s=new_marker_set('particle_190 geometry')
marker_sets["particle_190 geometry"]=s
s= marker_sets["particle_190 geometry"]
mark=s.place_marker((2002.46, 3324.22, 3280), (0.7, 0.7, 0.7), 151.857)
if "particle_191 geometry" not in marker_sets:
s=new_marker_set('particle_191 geometry')
marker_sets["particle_191 geometry"]=s
s= marker_sets["particle_191 geometry"]
mark=s.place_marker((1873.42, 2842.27, 3629.71), (0.7, 0.7, 0.7), 199.233)
if "particle_192 geometry" not in marker_sets:
s=new_marker_set('particle_192 geometry')
marker_sets["particle_192 geometry"]=s
s= marker_sets["particle_192 geometry"]
mark=s.place_marker((2033.26, 2288.75, 3427.07), (0.7, 0.7, 0.7), 118.863)
if "particle_193 geometry" not in marker_sets:
s=new_marker_set('particle_193 geometry')
marker_sets["particle_193 geometry"]=s
s= marker_sets["particle_193 geometry"]
mark=s.place_marker((1862.86, 1845.52, 3453.47), (0.7, 0.7, 0.7), 172.415)
if "particle_194 geometry" not in marker_sets:
s=new_marker_set('particle_194 geometry')
marker_sets["particle_194 geometry"]=s
s= marker_sets["particle_194 geometry"]
mark=s.place_marker((1513.56, 1532.78, 3725.9), (0.7, 0.7, 0.7), 134.26)
if "particle_195 geometry" not in marker_sets:
s=new_marker_set('particle_195 geometry')
marker_sets["particle_195 geometry"]=s
s= marker_sets["particle_195 geometry"]
mark=s.place_marker((932.506, 1252.82, 4419.26), (0.7, 0.7, 0.7), 139.548)
if "particle_196 geometry" not in marker_sets:
s=new_marker_set('particle_196 geometry')
marker_sets["particle_196 geometry"]=s
s= marker_sets["particle_196 geometry"]
mark=s.place_marker((587.625, 1606.74, 4198.83), (0.7, 0.7, 0.7), 196.526)
if "particle_197 geometry" not in marker_sets:
s=new_marker_set('particle_197 geometry')
marker_sets["particle_197 geometry"]=s
s= marker_sets["particle_197 geometry"]
mark=s.place_marker((735.495, 2134.83, 3656.36), (0.7, 0.7, 0.7), 136.206)
if "particle_198 geometry" not in marker_sets:
s=new_marker_set('particle_198 geometry')
marker_sets["particle_198 geometry"]=s
s= marker_sets["particle_198 geometry"]
mark=s.place_marker((1196.14, 2364.11, 2839.04), (0.7, 0.7, 0.7), 152.322)
if "particle_199 geometry" not in marker_sets:
s=new_marker_set('particle_199 geometry')
marker_sets["particle_199 geometry"]=s
s= marker_sets["particle_199 geometry"]
mark=s.place_marker((1657.26, 2520.99, 2403.31), (0.7, 0.7, 0.7), 126.054)
if "particle_200 geometry" not in marker_sets:
s=new_marker_set('particle_200 geometry')
marker_sets["particle_200 geometry"]=s
s= marker_sets["particle_200 geometry"]
mark=s.place_marker((1676.5, 2908.27, 2550.62), (0.7, 0.7, 0.7), 164.378)
if "particle_201 geometry" not in marker_sets:
s=new_marker_set('particle_201 geometry')
marker_sets["particle_201 geometry"]=s
s= marker_sets["particle_201 geometry"]
mark=s.place_marker((1719.04, 3139.46, 2920.3), (0.7, 0.7, 0.7), 122.205)
if "particle_202 geometry" not in marker_sets:
s=new_marker_set('particle_202 geometry')
marker_sets["particle_202 geometry"]=s
s= marker_sets["particle_202 geometry"]
mark=s.place_marker((1909.92, 3229.08, 3310.38), (0.7, 0.7, 0.7), 134.979)
if "particle_203 geometry" not in marker_sets:
s=new_marker_set('particle_203 geometry')
marker_sets["particle_203 geometry"]=s
s= marker_sets["particle_203 geometry"]
mark=s.place_marker((2151.6, 2958.39, 3336.8), (0.7, 0.7, 0.7), 136.375)
if "particle_204 geometry" not in marker_sets:
s=new_marker_set('particle_204 geometry')
marker_sets["particle_204 geometry"]=s
s= marker_sets["particle_204 geometry"]
mark=s.place_marker((1933.77, 3075.37, 3185.91), (0.7, 0.7, 0.7), 151.688)
if "particle_205 geometry" not in marker_sets:
s=new_marker_set('particle_205 geometry')
marker_sets["particle_205 geometry"]=s
s= marker_sets["particle_205 geometry"]
mark=s.place_marker((2063.67, 3286.27, 3274.45), (0.7, 0.7, 0.7), 116.156)
if "particle_206 geometry" not in marker_sets:
s=new_marker_set('particle_206 geometry')
marker_sets["particle_206 geometry"]=s
s= marker_sets["particle_206 geometry"]
mark=s.place_marker((2268.56, 2909.31, 2708.86), (0.7, 0.7, 0.7), 122.839)
if "particle_207 geometry" not in marker_sets:
s=new_marker_set('particle_207 geometry')
marker_sets["particle_207 geometry"]=s
s= marker_sets["particle_207 geometry"]
mark=s.place_marker((2149.45, 2656.41, 2290.36), (0.7, 0.7, 0.7), 164.716)
if "particle_208 geometry" not in marker_sets:
s=new_marker_set('particle_208 geometry')
marker_sets["particle_208 geometry"]=s
s= marker_sets["particle_208 geometry"]
mark=s.place_marker((1478.16, 2903.33, 2749.59), (0.7, 0.7, 0.7), 303.672)
if "particle_209 geometry" not in marker_sets:
s=new_marker_set('particle_209 geometry')
marker_sets["particle_209 geometry"]=s
s= marker_sets["particle_209 geometry"]
mark=s.place_marker((1059.88, 3245.18, 3655.4), (0.7, 0.7, 0.7), 220.298)
if "particle_210 geometry" not in marker_sets:
s=new_marker_set('particle_210 geometry')
marker_sets["particle_210 geometry"]=s
s= marker_sets["particle_210 geometry"]
mark=s.place_marker((1556.87, 3647.26, 3667.06), (0.7, 0.7, 0.7), 175.883)
if "particle_211 geometry" not in marker_sets:
s=new_marker_set('particle_211 geometry')
marker_sets["particle_211 geometry"]=s
s= marker_sets["particle_211 geometry"]
mark=s.place_marker((1888.44, 4151.65, 3327.18), (0.7, 0.7, 0.7), 233.581)
if "particle_212 geometry" not in marker_sets:
s=new_marker_set('particle_212 geometry')
marker_sets["particle_212 geometry"]=s
s= marker_sets["particle_212 geometry"]
mark=s.place_marker((1882.88, 4378.68, 2574.92), (0.7, 0.7, 0.7), 231.127)
if "particle_213 geometry" not in marker_sets:
s=new_marker_set('particle_213 geometry')
marker_sets["particle_213 geometry"]=s
s= marker_sets["particle_213 geometry"]
mark=s.place_marker((2288.16, 4677.66, 2208.19), (0.7, 0.7, 0.7), 247.413)
if "particle_214 geometry" not in marker_sets:
s=new_marker_set('particle_214 geometry')
marker_sets["particle_214 geometry"]=s
s= marker_sets["particle_214 geometry"]
mark=s.place_marker((2878.79, 4934.36, 2245.29), (0.7, 0.7, 0.7), 200.206)
if "particle_215 geometry" not in marker_sets:
s=new_marker_set('particle_215 geometry')
marker_sets["particle_215 geometry"]=s
s= marker_sets["particle_215 geometry"]
mark=s.place_marker((3073.22, 4859.6, 2622.72), (0.7, 0.7, 0.7), 150.419)
if "particle_216 geometry" not in marker_sets:
s=new_marker_set('particle_216 geometry')
marker_sets["particle_216 geometry"]=s
s= marker_sets["particle_216 geometry"]
mark=s.place_marker((2963.24, 4362.08, 2279.28), (0.7, 0.7, 0.7), 140.14)
if "particle_217 geometry" not in marker_sets:
s=new_marker_set('particle_217 geometry')
marker_sets["particle_217 geometry"]=s
s= marker_sets["particle_217 geometry"]
mark=s.place_marker((2801.35, 4188.46, 1876.49), (0.7, 0.7, 0.7), 132.949)
if "particle_218 geometry" not in marker_sets:
s=new_marker_set('particle_218 geometry')
marker_sets["particle_218 geometry"]=s
s= marker_sets["particle_218 geometry"]
mark=s.place_marker((2680.87, 3896.5, 1628.46), (0.7, 0.7, 0.7), 141.113)
if "particle_219 geometry" not in marker_sets:
s=new_marker_set('particle_219 geometry')
marker_sets["particle_219 geometry"]=s
s= marker_sets["particle_219 geometry"]
mark=s.place_marker((2315.16, 3910.45, 1661.13), (0.7, 0.7, 0.7), 171.526)
if "particle_220 geometry" not in marker_sets:
s=new_marker_set('particle_220 geometry')
marker_sets["particle_220 geometry"]=s
s= marker_sets["particle_220 geometry"]
mark=s.place_marker((1924.1, 4195.26, 2048.17), (0.7, 0.7, 0.7), 326.937)
if "particle_221 geometry" not in marker_sets:
s=new_marker_set('particle_221 geometry')
marker_sets["particle_221 geometry"]=s
s= marker_sets["particle_221 geometry"]
mark=s.place_marker((1728.02, 4109.7, 2598.84), (0.7, 0.7, 0.7), 92.0871)
if "particle_222 geometry" not in marker_sets:
s=new_marker_set('particle_222 geometry')
marker_sets["particle_222 geometry"]=s
s= marker_sets["particle_222 geometry"]
mark=s.place_marker((1587.48, 3646.99, 2571.48), (0.7, 0.7, 0.7), 210.273)
if "particle_223 geometry" not in marker_sets:
s=new_marker_set('particle_223 geometry')
marker_sets["particle_223 geometry"]=s
s= marker_sets["particle_223 geometry"]
mark=s.place_marker((1885.54, 3131.32, 2107.2), (0.7, 0.7, 0.7), 122.628)
if "particle_224 geometry" not in marker_sets:
s=new_marker_set('particle_224 geometry')
marker_sets["particle_224 geometry"]=s
s= marker_sets["particle_224 geometry"]
mark=s.place_marker((2043.85, 3045.21, 1936.68), (0.7, 0.7, 0.7), 109.176)
if "particle_225 geometry" not in marker_sets:
s=new_marker_set('particle_225 geometry')
marker_sets["particle_225 geometry"]=s
s= marker_sets["particle_225 geometry"]
mark=s.place_marker((2087.8, 3242.63, 2169.16), (0.7, 0.7, 0.7), 142.213)
if "particle_226 geometry" not in marker_sets:
s=new_marker_set('particle_226 geometry')
marker_sets["particle_226 geometry"]=s
s= marker_sets["particle_226 geometry"]
mark=s.place_marker((1800.24, 3334.7, 2304), (0.7, 0.7, 0.7), 250.078)
if "particle_227 geometry" not in marker_sets:
s=new_marker_set('particle_227 geometry')
marker_sets["particle_227 geometry"]=s
s= marker_sets["particle_227 geometry"]
mark=s.place_marker((2147.22, 3209.25, 2574.52), (0.7, 0.7, 0.7), 123.558)
if "particle_228 geometry" not in marker_sets:
s=new_marker_set('particle_228 geometry')
marker_sets["particle_228 geometry"]=s
s= marker_sets["particle_228 geometry"]
mark=s.place_marker((2542.35, 2942.27, 2566.12), (0.7, 0.7, 0.7), 235.992)
if "particle_229 geometry" not in marker_sets:
s=new_marker_set('particle_229 geometry')
marker_sets["particle_229 geometry"]=s
s= marker_sets["particle_229 geometry"]
mark=s.place_marker((2882.96, 2636.79, 2737.66), (0.7, 0.7, 0.7), 172.373)
if "particle_230 geometry" not in marker_sets:
s=new_marker_set('particle_230 geometry')
marker_sets["particle_230 geometry"]=s
s= marker_sets["particle_230 geometry"]
mark=s.place_marker((2929.12, 2663.24, 3191.36), (0.7, 0.7, 0.7), 152.322)
if "particle_231 geometry" not in marker_sets:
s=new_marker_set('particle_231 geometry')
marker_sets["particle_231 geometry"]=s
s= marker_sets["particle_231 geometry"]
mark=s.place_marker((2970.89, 2821.3, 3462.89), (0.7, 0.7, 0.7), 196.653)
if "particle_232 geometry" not in marker_sets:
s=new_marker_set('particle_232 geometry')
marker_sets["particle_232 geometry"]=s
s= marker_sets["particle_232 geometry"]
mark=s.place_marker((3234.31, 2721.27, 3257.39), (0.7, 0.7, 0.7), 134.091)
if "particle_233 geometry" not in marker_sets:
s=new_marker_set('particle_233 geometry')
marker_sets["particle_233 geometry"]=s
s= marker_sets["particle_233 geometry"]
mark=s.place_marker((3404.07, 2783.91, 2982.65), (0.7, 0.7, 0.7), 180.325)
if "particle_234 geometry" not in marker_sets:
s=new_marker_set('particle_234 geometry')
marker_sets["particle_234 geometry"]=s
s= marker_sets["particle_234 geometry"]
mark=s.place_marker((2964.11, 2791.09, 2843.68), (0.7, 0.7, 0.7), 218.437)
if "particle_235 geometry" not in marker_sets:
s=new_marker_set('particle_235 geometry')
marker_sets["particle_235 geometry"]=s
s= marker_sets["particle_235 geometry"]
mark=s.place_marker((2592.13, 3060.16, 2905.59), (0.7, 0.7, 0.7), 148.008)
if "particle_236 geometry" not in marker_sets:
s=new_marker_set('particle_236 geometry')
marker_sets["particle_236 geometry"]=s
s= marker_sets["particle_236 geometry"]
mark=s.place_marker((2372.34, 3582.04, 3197.04), (0.7, 0.7, 0.7), 191.873)
if "particle_237 geometry" not in marker_sets:
s=new_marker_set('particle_237 geometry')
marker_sets["particle_237 geometry"]=s
s= marker_sets["particle_237 geometry"]
mark=s.place_marker((2283.33, 4121.25, 3225.29), (0.7, 0.7, 0.7), 138.575)
if "particle_238 geometry" not in marker_sets:
s=new_marker_set('particle_238 geometry')
marker_sets["particle_238 geometry"]=s
s= marker_sets["particle_238 geometry"]
mark=s.place_marker((2517.63, 4377, 3470.65), (0.7, 0.7, 0.7), 161.205)
if "particle_239 geometry" not in marker_sets:
s=new_marker_set('particle_239 geometry')
marker_sets["particle_239 geometry"]=s
s= marker_sets["particle_239 geometry"]
mark=s.place_marker((2310.47, 3968.27, 3542.91), (0.7, 0.7, 0.7), 288.021)
if "particle_240 geometry" not in marker_sets:
s=new_marker_set('particle_240 geometry')
marker_sets["particle_240 geometry"]=s
s= marker_sets["particle_240 geometry"]
mark=s.place_marker((2808.07, 3484.72, 3376.35), (0.7, 0.7, 0.7), 227.405)
if "particle_241 geometry" not in marker_sets:
s=new_marker_set('particle_241 geometry')
marker_sets["particle_241 geometry"]=s
s= marker_sets["particle_241 geometry"]
mark=s.place_marker((2944.55, 3033.11, 3155.72), (0.7, 0.7, 0.7), 126.519)
if "particle_242 geometry" not in marker_sets:
s=new_marker_set('particle_242 geometry')
marker_sets["particle_242 geometry"]=s
s= marker_sets["particle_242 geometry"]
mark=s.place_marker((2888.08, 3042.43, 3449.37), (0.7, 0.7, 0.7), 117.975)
if "particle_243 geometry" not in marker_sets:
s=new_marker_set('particle_243 geometry')
marker_sets["particle_243 geometry"]=s
s= marker_sets["particle_243 geometry"]
mark=s.place_marker((2753.11, 2914.17, 3099.36), (0.7, 0.7, 0.7), 200.883)
if "particle_244 geometry" not in marker_sets:
s=new_marker_set('particle_244 geometry')
marker_sets["particle_244 geometry"]=s
s= marker_sets["particle_244 geometry"]
mark=s.place_marker((2891.88, 2883.9, 2808.01), (0.7, 0.7, 0.7), 158.794)
if "particle_245 geometry" not in marker_sets:
s=new_marker_set('particle_245 geometry')
marker_sets["particle_245 geometry"]=s
s= marker_sets["particle_245 geometry"]
mark=s.place_marker((3061.22, 3048.05, 2602.83), (0.7, 0.7, 0.7), 115.86)
if "particle_246 geometry" not in marker_sets:
s=new_marker_set('particle_246 geometry')
marker_sets["particle_246 geometry"]=s
s= marker_sets["particle_246 geometry"]
mark=s.place_marker((3067.04, 2897.15, 2415.84), (0.7, 0.7, 0.7), 133.034)
if "particle_247 geometry" not in marker_sets:
s=new_marker_set('particle_247 geometry')
marker_sets["particle_247 geometry"]=s
s= marker_sets["particle_247 geometry"]
mark=s.place_marker((3024.8, 2438.56, 2540.2), (0.7, 0.7, 0.7), 314.627)
if "particle_248 geometry" not in marker_sets:
s=new_marker_set('particle_248 geometry')
marker_sets["particle_248 geometry"]=s
s= marker_sets["particle_248 geometry"]
mark=s.place_marker((3001.15, 2676.18, 2814.74), (0.7, 0.7, 0.7), 115.352)
if "particle_249 geometry" not in marker_sets:
s=new_marker_set('particle_249 geometry')
marker_sets["particle_249 geometry"]=s
s= marker_sets["particle_249 geometry"]
mark=s.place_marker((3086.17, 3066.84, 2985.08), (0.7, 0.7, 0.7), 180.621)
if "particle_250 geometry" not in marker_sets:
s=new_marker_set('particle_250 geometry')
marker_sets["particle_250 geometry"]=s
s= marker_sets["particle_250 geometry"]
mark=s.place_marker((3250.02, 3278.47, 2722.78), (0.7, 0.7, 0.7), 126.265)
if "particle_251 geometry" not in marker_sets:
s=new_marker_set('particle_251 geometry')
marker_sets["particle_251 geometry"]=s
s= marker_sets["particle_251 geometry"]
mark=s.place_marker((3242.95, 3327.34, 2328.44), (0.7, 0.7, 0.7), 133.541)
if "particle_252 geometry" not in marker_sets:
s=new_marker_set('particle_252 geometry')
marker_sets["particle_252 geometry"]=s
s= marker_sets["particle_252 geometry"]
mark=s.place_marker((3240.56, 3578.72, 1955.57), (0.7, 0.7, 0.7), 171.019)
if "particle_253 geometry" not in marker_sets:
s=new_marker_set('particle_253 geometry')
marker_sets["particle_253 geometry"]=s
s= marker_sets["particle_253 geometry"]
mark=s.place_marker((3230.1, 3904.26, 1699.37), (0.7, 0.7, 0.7), 115.437)
if "particle_254 geometry" not in marker_sets:
s=new_marker_set('particle_254 geometry')
marker_sets["particle_254 geometry"]=s
s= marker_sets["particle_254 geometry"]
mark=s.place_marker((3486.95, 3782.97, 1824.99), (0.7, 0.7, 0.7), 158.583)
if "particle_255 geometry" not in marker_sets:
s=new_marker_set('particle_255 geometry')
marker_sets["particle_255 geometry"]=s
s= marker_sets["particle_255 geometry"]
mark=s.place_marker((3250.63, 3436.94, 1912.91), (0.7, 0.7, 0.7), 192)
if "particle_256 geometry" not in marker_sets:
s=new_marker_set('particle_256 geometry')
marker_sets["particle_256 geometry"]=s
s= marker_sets["particle_256 geometry"]
mark=s.place_marker((3124.55, 3047.5, 2094.26), (0.7, 0.7, 0.7), 150.165)
if "particle_257 geometry" not in marker_sets:
s=new_marker_set('particle_257 geometry')
marker_sets["particle_257 geometry"]=s
s= marker_sets["particle_257 geometry"]
mark=s.place_marker((2906.37, 2823.16, 2080.09), (0.7, 0.7, 0.7), 157.567)
if "particle_258 geometry" not in marker_sets:
s=new_marker_set('particle_258 geometry')
marker_sets["particle_258 geometry"]=s
s= marker_sets["particle_258 geometry"]
mark=s.place_marker((2768.76, 2865.77, 1994.34), (0.7, 0.7, 0.7), 199.36)
if "particle_259 geometry" not in marker_sets:
s=new_marker_set('particle_259 geometry')
marker_sets["particle_259 geometry"]=s
s= marker_sets["particle_259 geometry"]
mark=s.place_marker((2615.46, 3241.37, 2218.03), (0.7, 0.7, 0.7), 105.369)
if "particle_260 geometry" not in marker_sets:
s=new_marker_set('particle_260 geometry')
marker_sets["particle_260 geometry"]=s
s= marker_sets["particle_260 geometry"]
mark=s.place_marker((2407.52, 3298.82, 2397.05), (0.7, 0.7, 0.7), 118.651)
if "particle_261 geometry" not in marker_sets:
s=new_marker_set('particle_261 geometry')
marker_sets["particle_261 geometry"]=s
s= marker_sets["particle_261 geometry"]
mark=s.place_marker((2514.87, 2861.35, 2413.72), (0.7, 0.7, 0.7), 219.664)
if "particle_262 geometry" not in marker_sets:
s=new_marker_set('particle_262 geometry')
marker_sets["particle_262 geometry"]=s
s= marker_sets["particle_262 geometry"]
mark=s.place_marker((2794.9, 2373.95, 2203.27), (0.7, 0.7, 0.7), 196.018)
if "particle_263 geometry" not in marker_sets:
s=new_marker_set('particle_263 geometry')
marker_sets["particle_263 geometry"]=s
s= marker_sets["particle_263 geometry"]
mark=s.place_marker((2952.57, 1926.85, 2028.6), (0.7, 0.7, 0.7), 218.141)
if "particle_264 geometry" not in marker_sets:
s=new_marker_set('particle_264 geometry')
marker_sets["particle_264 geometry"]=s
s= marker_sets["particle_264 geometry"]
mark=s.place_marker((2743.5, 1919.84, 1740.05), (0.7, 0.7, 0.7), 181.636)
if "particle_265 geometry" not in marker_sets:
s=new_marker_set('particle_265 geometry')
marker_sets["particle_265 geometry"]=s
s= marker_sets["particle_265 geometry"]
mark=s.place_marker((2589.63, 2117.4, 1776.08), (0.7, 0.7, 0.7), 195.003)
if "particle_266 geometry" not in marker_sets:
s=new_marker_set('particle_266 geometry')
marker_sets["particle_266 geometry"]=s
s= marker_sets["particle_266 geometry"]
mark=s.place_marker((2770.38, 2008.18, 1707.94), (0.7, 0.7, 0.7), 139.209)
if "particle_267 geometry" not in marker_sets:
s=new_marker_set('particle_267 geometry')
marker_sets["particle_267 geometry"]=s
s= marker_sets["particle_267 geometry"]
mark=s.place_marker((2817.61, 2031.59, 1644.96), (0.7, 0.7, 0.7), 189.885)
if "particle_268 geometry" not in marker_sets:
s=new_marker_set('particle_268 geometry')
marker_sets["particle_268 geometry"]=s
s= marker_sets["particle_268 geometry"]
mark=s.place_marker((3017.78, 2267.08, 1760.65), (0.7, 0.7, 0.7), 267.674)
if "particle_269 geometry" not in marker_sets:
s=new_marker_set('particle_269 geometry')
marker_sets["particle_269 geometry"]=s
s= marker_sets["particle_269 geometry"]
mark=s.place_marker((3447.06, 2537.3, 2036.31), (0.7, 0.7, 0.7), 196.568)
if "particle_270 geometry" not in marker_sets:
s=new_marker_set('particle_270 geometry')
marker_sets["particle_270 geometry"]=s
s= marker_sets["particle_270 geometry"]
mark=s.place_marker((3408.91, 2641.7, 1765.76), (0.7, 0.7, 0.7), 192.423)
if "particle_271 geometry" not in marker_sets:
s=new_marker_set('particle_271 geometry')
marker_sets["particle_271 geometry"]=s
s= marker_sets["particle_271 geometry"]
mark=s.place_marker((3310.31, 2377.78, 1456.31), (1, 0.7, 0), 202.405)
if "particle_272 geometry" not in marker_sets:
s=new_marker_set('particle_272 geometry')
marker_sets["particle_272 geometry"]=s
s= marker_sets["particle_272 geometry"]
mark=s.place_marker((3586.56, 2967.21, 2046.9), (0.7, 0.7, 0.7), 135.529)
if "particle_273 geometry" not in marker_sets:
s=new_marker_set('particle_273 geometry')
marker_sets["particle_273 geometry"]=s
s= marker_sets["particle_273 geometry"]
mark=s.place_marker((3878.55, 3748.21, 2598.68), (0.7, 0.7, 0.7), 114.21)
if "particle_274 geometry" not in marker_sets:
s=new_marker_set('particle_274 geometry')
marker_sets["particle_274 geometry"]=s
s= marker_sets["particle_274 geometry"]
mark=s.place_marker((3583.26, 3838.27, 2679.72), (0.7, 0.7, 0.7), 159.133)
if "particle_275 geometry" not in marker_sets:
s=new_marker_set('particle_275 geometry')
marker_sets["particle_275 geometry"]=s
s= marker_sets["particle_275 geometry"]
mark=s.place_marker((3311.24, 3557.62, 2766.32), (0.7, 0.7, 0.7), 144.412)
if "particle_276 geometry" not in marker_sets:
s=new_marker_set('particle_276 geometry')
marker_sets["particle_276 geometry"]=s
s= marker_sets["particle_276 geometry"]
mark=s.place_marker((3117.69, 3332.84, 2843.72), (0.7, 0.7, 0.7), 70.8525)
if "particle_277 geometry" not in marker_sets:
s=new_marker_set('particle_277 geometry')
marker_sets["particle_277 geometry"]=s
s= marker_sets["particle_277 geometry"]
mark=s.place_marker((2962.76, 2882.39, 2443.71), (0.7, 0.7, 0.7), 141.874)
if "particle_278 geometry" not in marker_sets:
s=new_marker_set('particle_278 geometry')
marker_sets["particle_278 geometry"]=s
s= marker_sets["particle_278 geometry"]
mark=s.place_marker((2867.54, 2445.06, 2023.93), (0.7, 0.7, 0.7), 217.337)
if "particle_279 geometry" not in marker_sets:
s=new_marker_set('particle_279 geometry')
marker_sets["particle_279 geometry"]=s
s= marker_sets["particle_279 geometry"]
mark=s.place_marker((2899.27, 2373.73, 2050.51), (0.7, 0.7, 0.7), 237.641)
if "particle_280 geometry" not in marker_sets:
s=new_marker_set('particle_280 geometry')
marker_sets["particle_280 geometry"]=s
s= marker_sets["particle_280 geometry"]
mark=s.place_marker((2858.6, 2384.47, 2496.82), (0.7, 0.7, 0.7), 229.393)
if "particle_281 geometry" not in marker_sets:
s=new_marker_set('particle_281 geometry')
marker_sets["particle_281 geometry"]=s
s= marker_sets["particle_281 geometry"]
mark=s.place_marker((3181.6, 1883.4, 2447.25), (0.7, 0.7, 0.7), 349.906)
if "particle_282 geometry" not in marker_sets:
s=new_marker_set('particle_282 geometry')
marker_sets["particle_282 geometry"]=s
s= marker_sets["particle_282 geometry"]
mark=s.place_marker((3518.81, 1521.06, 2151.44), (0.7, 0.7, 0.7), 162.347)
if "particle_283 geometry" not in marker_sets:
s=new_marker_set('particle_283 geometry')
marker_sets["particle_283 geometry"]=s
s= marker_sets["particle_283 geometry"]
mark=s.place_marker((3647.82, 1418.99, 2143.71), (0.7, 0.7, 0.7), 194.072)
if "particle_284 geometry" not in marker_sets:
s=new_marker_set('particle_284 geometry')
marker_sets["particle_284 geometry"]=s
s= marker_sets["particle_284 geometry"]
mark=s.place_marker((3775.35, 1374.87, 2267.9), (0.7, 0.7, 0.7), 242.21)
if "particle_285 geometry" not in marker_sets:
s=new_marker_set('particle_285 geometry')
marker_sets["particle_285 geometry"]=s
s= marker_sets["particle_285 geometry"]
mark=s.place_marker((4140.61, 1528.88, 2550.11), (0.7, 0.7, 0.7), 320.93)
if "particle_286 geometry" not in marker_sets:
s=new_marker_set('particle_286 geometry')
marker_sets["particle_286 geometry"]=s
s= marker_sets["particle_286 geometry"]
mark=s.place_marker((4587.88, 1184.48, 2691.91), (0.7, 0.7, 0.7), 226.432)
if "particle_287 geometry" not in marker_sets:
s=new_marker_set('particle_287 geometry')
marker_sets["particle_287 geometry"]=s
s= marker_sets["particle_287 geometry"]
mark=s.place_marker((4343.84, 850.769, 2647.87), (0.7, 0.7, 0.7), 125.208)
if "particle_288 geometry" not in marker_sets:
s=new_marker_set('particle_288 geometry')
marker_sets["particle_288 geometry"]=s
s= marker_sets["particle_288 geometry"]
mark=s.place_marker((3849.59, 681.616, 2591.38), (0.7, 0.7, 0.7), 197.837)
if "particle_289 geometry" not in marker_sets:
s=new_marker_set('particle_289 geometry')
marker_sets["particle_289 geometry"]=s
s= marker_sets["particle_289 geometry"]
mark=s.place_marker((3615.16, 274.149, 3021.75), (0.7, 0.7, 0.7), 167.804)
if "particle_290 geometry" not in marker_sets:
s=new_marker_set('particle_290 geometry')
marker_sets["particle_290 geometry"]=s
s= marker_sets["particle_290 geometry"]
mark=s.place_marker((3630.84, -241.802, 3674.37), (0.7, 0.7, 0.7), 136.84)
if "particle_291 geometry" not in marker_sets:
s=new_marker_set('particle_291 geometry')
marker_sets["particle_291 geometry"]=s
s= marker_sets["particle_291 geometry"]
mark=s.place_marker((3800.44, 46.9771, 3991.57), (0.7, 0.7, 0.7), 85.7421)
if "particle_292 geometry" not in marker_sets:
s=new_marker_set('particle_292 geometry')
marker_sets["particle_292 geometry"]=s
s= marker_sets["particle_292 geometry"]
mark=s.place_marker((4065.14, 956.297, 2947.75), (1, 0.7, 0), 256)
if "particle_293 geometry" not in marker_sets:
s=new_marker_set('particle_293 geometry')
marker_sets["particle_293 geometry"]=s
s= marker_sets["particle_293 geometry"]
mark=s.place_marker((3442.46, 98.9698, 3276.91), (0.7, 0.7, 0.7), 138.702)
if "particle_294 geometry" not in marker_sets:
s=new_marker_set('particle_294 geometry')
marker_sets["particle_294 geometry"]=s
s= marker_sets["particle_294 geometry"]
mark=s.place_marker((3145.17, -287.101, 3275.74), (0.7, 0.7, 0.7), 140.732)
if "particle_295 geometry" not in marker_sets:
s=new_marker_set('particle_295 geometry')
marker_sets["particle_295 geometry"]=s
s= marker_sets["particle_295 geometry"]
mark=s.place_marker((3425.48, -234.99, 3171.21), (0.7, 0.7, 0.7), 81.3006)
if "particle_296 geometry" not in marker_sets:
s=new_marker_set('particle_296 geometry')
marker_sets["particle_296 geometry"]=s
s= marker_sets["particle_296 geometry"]
mark=s.place_marker((3819.37, -394.19, 3319.82), (0.7, 0.7, 0.7), 133.837)
if "particle_297 geometry" not in marker_sets:
s=new_marker_set('particle_297 geometry')
marker_sets["particle_297 geometry"]=s
s= marker_sets["particle_297 geometry"]
mark=s.place_marker((3923.54, 179.097, 3066.71), (0.7, 0.7, 0.7), 98.3475)
if "particle_298 geometry" not in marker_sets:
s=new_marker_set('particle_298 geometry')
marker_sets["particle_298 geometry"]=s
s= marker_sets["particle_298 geometry"]
mark=s.place_marker((3788.17, 772.879, 2523.06), (0.7, 0.7, 0.7), 297.623)
if "particle_299 geometry" not in marker_sets:
s=new_marker_set('particle_299 geometry')
marker_sets["particle_299 geometry"]=s
s= marker_sets["particle_299 geometry"]
mark=s.place_marker((3825.55, 1188.71, 2414.53), (0.7, 0.7, 0.7), 212.938)
if "particle_300 geometry" not in marker_sets:
s=new_marker_set('particle_300 geometry')
marker_sets["particle_300 geometry"]=s
s= marker_sets["particle_300 geometry"]
mark=s.place_marker((3964.29, 1178.19, 2279.8), (0.7, 0.7, 0.7), 154.183)
if "particle_301 geometry" not in marker_sets:
s=new_marker_set('particle_301 geometry')
marker_sets["particle_301 geometry"]=s
s= marker_sets["particle_301 geometry"]
mark=s.place_marker((4339.68, 1218.74, 2371.45), (0.7, 0.7, 0.7), 180.832)
if "particle_302 geometry" not in marker_sets:
s=new_marker_set('particle_302 geometry')
marker_sets["particle_302 geometry"]=s
s= marker_sets["particle_302 geometry"]
mark=s.place_marker((4512.23, 1423.46, 2622.29), (0.7, 0.7, 0.7), 122.332)
if "particle_303 geometry" not in marker_sets:
s=new_marker_set('particle_303 geometry')
marker_sets["particle_303 geometry"]=s
s= marker_sets["particle_303 geometry"]
mark=s.place_marker((4557.59, 1618.01, 2936.67), (0.7, 0.7, 0.7), 209.047)
if "particle_304 geometry" not in marker_sets:
s=new_marker_set('particle_304 geometry')
marker_sets["particle_304 geometry"]=s
s= marker_sets["particle_304 geometry"]
mark=s.place_marker((4934.16, 1541.72, 2713.71), (0.7, 0.7, 0.7), 126.985)
if "particle_305 geometry" not in marker_sets:
s=new_marker_set('particle_305 geometry')
marker_sets["particle_305 geometry"]=s
s= marker_sets["particle_305 geometry"]
mark=s.place_marker((5299, 1463.87, 2787.72), (0.7, 0.7, 0.7), 122.205)
if "particle_306 geometry" not in marker_sets:
s=new_marker_set('particle_306 geometry')
marker_sets["particle_306 geometry"]=s
s= marker_sets["particle_306 geometry"]
mark=s.place_marker((5368.58, 1253.45, 2783.1), (0.7, 0.7, 0.7), 107.95)
if "particle_307 geometry" not in marker_sets:
s=new_marker_set('particle_307 geometry')
marker_sets["particle_307 geometry"]=s
s= marker_sets["particle_307 geometry"]
mark=s.place_marker((4841.66, 1333.59, 2492.75), (0.7, 0.7, 0.7), 182.567)
if "particle_308 geometry" not in marker_sets:
s=new_marker_set('particle_308 geometry')
marker_sets["particle_308 geometry"]=s
s= marker_sets["particle_308 geometry"]
mark=s.place_marker((4217.64, 1360.5, 2369.14), (0.7, 0.7, 0.7), 185.274)
if "particle_309 geometry" not in marker_sets:
s=new_marker_set('particle_309 geometry')
marker_sets["particle_309 geometry"]=s
s= marker_sets["particle_309 geometry"]
mark=s.place_marker((3780.26, 1483.93, 2537.48), (0.7, 0.7, 0.7), 413.567)
if "particle_310 geometry" not in marker_sets:
s=new_marker_set('particle_310 geometry')
marker_sets["particle_310 geometry"]=s
s= marker_sets["particle_310 geometry"]
mark=s.place_marker((3652.51, 1453.06, 2344.27), (0.7, 0.7, 0.7), 240.01)
if "particle_311 geometry" not in marker_sets:
s=new_marker_set('particle_311 geometry')
marker_sets["particle_311 geometry"]=s
s= marker_sets["particle_311 geometry"]
mark=s.place_marker((3674, 1476.45, 2375.9), (0.7, 0.7, 0.7), 238.995)
if "particle_312 geometry" not in marker_sets:
s=new_marker_set('particle_312 geometry')
marker_sets["particle_312 geometry"]=s
s= marker_sets["particle_312 geometry"]
mark=s.place_marker((3909.68, 1368.86, 2247.83), (0.7, 0.7, 0.7), 203.674)
if "particle_313 geometry" not in marker_sets:
s=new_marker_set('particle_313 geometry')
marker_sets["particle_313 geometry"]=s
s= marker_sets["particle_313 geometry"]
mark=s.place_marker((4426.4, 1534.59, 1974.34), (0.7, 0.7, 0.7), 266.744)
if "particle_314 geometry" not in marker_sets:
s=new_marker_set('particle_314 geometry')
marker_sets["particle_314 geometry"]=s
s= marker_sets["particle_314 geometry"]
mark=s.place_marker((4405.33, 1111.17, 1813.09), (0.7, 0.7, 0.7), 147.585)
if "particle_315 geometry" not in marker_sets:
s=new_marker_set('particle_315 geometry')
marker_sets["particle_315 geometry"]=s
s= marker_sets["particle_315 geometry"]
mark=s.place_marker((4181.68, 1249.73, 1932.41), (0.7, 0.7, 0.7), 249.485)
if "particle_316 geometry" not in marker_sets:
s=new_marker_set('particle_316 geometry')
marker_sets["particle_316 geometry"]=s
s= marker_sets["particle_316 geometry"]
mark=s.place_marker((4114.31, 1629.23, 2117.6), (0.7, 0.7, 0.7), 119.371)
if "particle_317 geometry" not in marker_sets:
s=new_marker_set('particle_317 geometry')
marker_sets["particle_317 geometry"]=s
s= marker_sets["particle_317 geometry"]
mark=s.place_marker((4282.12, 1983.7, 2696.53), (0.7, 0.7, 0.7), 155.875)
if "particle_318 geometry" not in marker_sets:
s=new_marker_set('particle_318 geometry')
marker_sets["particle_318 geometry"]=s
s= marker_sets["particle_318 geometry"]
mark=s.place_marker((4278.76, 1928.6, 3460.65), (0.7, 0.7, 0.7), 189.419)
if "particle_319 geometry" not in marker_sets:
s=new_marker_set('particle_319 geometry')
marker_sets["particle_319 geometry"]=s
s= marker_sets["particle_319 geometry"]
mark=s.place_marker((3830.68, 1679.17, 3683.46), (0.7, 0.7, 0.7), 137.475)
if "particle_320 geometry" not in marker_sets:
s=new_marker_set('particle_320 geometry')
marker_sets["particle_320 geometry"]=s
s= marker_sets["particle_320 geometry"]
mark=s.place_marker((3357.3, 1509.66, 3628.1), (0.7, 0.7, 0.7), 176.179)
if "particle_321 geometry" not in marker_sets:
s=new_marker_set('particle_321 geometry')
marker_sets["particle_321 geometry"]=s
s= marker_sets["particle_321 geometry"]
mark=s.place_marker((2972.04, 1248.53, 3703.38), (0.7, 0.7, 0.7), 138.829)
if "particle_322 geometry" not in marker_sets:
s=new_marker_set('particle_322 geometry')
marker_sets["particle_322 geometry"]=s
s= marker_sets["particle_322 geometry"]
mark=s.place_marker((2732.25, 916.748, 3818.9), (0.7, 0.7, 0.7), 148.727)
if "particle_323 geometry" not in marker_sets:
s=new_marker_set('particle_323 geometry')
marker_sets["particle_323 geometry"]=s
s= marker_sets["particle_323 geometry"]
mark=s.place_marker((2604.57, 493.978, 4104.24), (0.7, 0.7, 0.7), 230.323)
if "particle_324 geometry" not in marker_sets:
s=new_marker_set('particle_324 geometry')
marker_sets["particle_324 geometry"]=s
s= marker_sets["particle_324 geometry"]
mark=s.place_marker((3083.82, 833.176, 3876.84), (0.7, 0.7, 0.7), 175.376)
if "particle_325 geometry" not in marker_sets:
s=new_marker_set('particle_325 geometry')
marker_sets["particle_325 geometry"]=s
s= marker_sets["particle_325 geometry"]
mark=s.place_marker((3311.54, 1175.78, 3586.08), (0.7, 0.7, 0.7), 161.163)
if "particle_326 geometry" not in marker_sets:
s=new_marker_set('particle_326 geometry')
marker_sets["particle_326 geometry"]=s
s= marker_sets["particle_326 geometry"]
mark=s.place_marker((3171.82, 1508.99, 3896.27), (0.7, 0.7, 0.7), 125.885)
if "particle_327 geometry" not in marker_sets:
s=new_marker_set('particle_327 geometry')
marker_sets["particle_327 geometry"]=s
s= marker_sets["particle_327 geometry"]
mark=s.place_marker((3232.17, 1772.56, 4274.81), (0.7, 0.7, 0.7), 206.635)
if "particle_328 geometry" not in marker_sets:
s=new_marker_set('particle_328 geometry')
marker_sets["particle_328 geometry"]=s
s= marker_sets["particle_328 geometry"]
mark=s.place_marker((2894.5, 1769.45, 3960.72), (0.7, 0.7, 0.7), 151.392)
if "particle_329 geometry" not in marker_sets:
s=new_marker_set('particle_329 geometry')
marker_sets["particle_329 geometry"]=s
s= marker_sets["particle_329 geometry"]
mark=s.place_marker((2642.06, 1645.16, 3707.15), (0.7, 0.7, 0.7), 173.388)
if "particle_330 geometry" not in marker_sets:
s=new_marker_set('particle_330 geometry')
marker_sets["particle_330 geometry"]=s
s= marker_sets["particle_330 geometry"]
mark=s.place_marker((2546.54, 1328.13, 3788.34), (0.7, 0.7, 0.7), 135.825)
if "particle_331 geometry" not in marker_sets:
s=new_marker_set('particle_331 geometry')
marker_sets["particle_331 geometry"]=s
s= marker_sets["particle_331 geometry"]
mark=s.place_marker((2556.82, 974.975, 4037.55), (0.7, 0.7, 0.7), 186.839)
if "particle_332 geometry" not in marker_sets:
s=new_marker_set('particle_332 geometry')
marker_sets["particle_332 geometry"]=s
s= marker_sets["particle_332 geometry"]
mark=s.place_marker((2615.22, 623.419, 4345.99), (0.7, 0.7, 0.7), 121.189)
if "particle_333 geometry" not in marker_sets:
s=new_marker_set('particle_333 geometry')
marker_sets["particle_333 geometry"]=s
s= marker_sets["particle_333 geometry"]
mark=s.place_marker((2854.68, 902.091, 4130.34), (0.7, 0.7, 0.7), 102.916)
if "particle_334 geometry" not in marker_sets:
s=new_marker_set('particle_334 geometry')
marker_sets["particle_334 geometry"]=s
s= marker_sets["particle_334 geometry"]
mark=s.place_marker((3208.77, 1220.3, 3721.46), (0.7, 0.7, 0.7), 212.769)
if "particle_335 geometry" not in marker_sets:
s=new_marker_set('particle_335 geometry')
marker_sets["particle_335 geometry"]=s
s= marker_sets["particle_335 geometry"]
mark=s.place_marker((3343.58, 1628.36, 3191.35), (0.7, 0.7, 0.7), 173.092)
if "particle_336 geometry" not in marker_sets:
s=new_marker_set('particle_336 geometry')
marker_sets["particle_336 geometry"]=s
s= marker_sets["particle_336 geometry"]
mark=s.place_marker((3669.27, 1920.06, 2919.25), (0.7, 0.7, 0.7), 264.502)
if "particle_337 geometry" not in marker_sets:
s=new_marker_set('particle_337 geometry')
marker_sets["particle_337 geometry"]=s
s= marker_sets["particle_337 geometry"]
mark=s.place_marker((4210.13, 2073.6, 2952.3), (0.7, 0.7, 0.7), 208.666)
if "particle_338 geometry" not in marker_sets:
s=new_marker_set('particle_338 geometry')
marker_sets["particle_338 geometry"]=s
s= marker_sets["particle_338 geometry"]
mark=s.place_marker((4692.53, 1949.28, 2936.61), (0.7, 0.7, 0.7), 186.797)
if "particle_339 geometry" not in marker_sets:
s=new_marker_set('particle_339 geometry')
marker_sets["particle_339 geometry"]=s
s= marker_sets["particle_339 geometry"]
mark=s.place_marker((4841.95, 1807.81, 2475.35), (0.7, 0.7, 0.7), 255.534)
if "particle_340 geometry" not in marker_sets:
s=new_marker_set('particle_340 geometry')
marker_sets["particle_340 geometry"]=s
s= marker_sets["particle_340 geometry"]
mark=s.place_marker((4972.15, 1406.53, 2389.31), (0.7, 0.7, 0.7), 153.126)
if "particle_341 geometry" not in marker_sets:
s=new_marker_set('particle_341 geometry')
marker_sets["particle_341 geometry"]=s
s= marker_sets["particle_341 geometry"]
mark=s.place_marker((5275.08, 1544.1, 2588.1), (0.7, 0.7, 0.7), 165.816)
if "particle_342 geometry" not in marker_sets:
s=new_marker_set('particle_342 geometry')
marker_sets["particle_342 geometry"]=s
s= marker_sets["particle_342 geometry"]
mark=s.place_marker((5116.52, 1856.98, 2426.61), (0.7, 0.7, 0.7), 134.429)
if "particle_343 geometry" not in marker_sets:
s=new_marker_set('particle_343 geometry')
marker_sets["particle_343 geometry"]=s
s= marker_sets["particle_343 geometry"]
mark=s.place_marker((4806.5, 2023.59, 2572.85), (0.7, 0.7, 0.7), 178.971)
if "particle_344 geometry" not in marker_sets:
s=new_marker_set('particle_344 geometry')
marker_sets["particle_344 geometry"]=s
s= marker_sets["particle_344 geometry"]
mark=s.place_marker((4605.48, 1927.03, 3025.94), (0.7, 0.7, 0.7), 189.969)
if "particle_345 geometry" not in marker_sets:
s=new_marker_set('particle_345 geometry')
marker_sets["particle_345 geometry"]=s
s= marker_sets["particle_345 geometry"]
mark=s.place_marker((4818.49, 1972.98, 3611.69), (0.7, 0.7, 0.7), 121.359)
if "particle_346 geometry" not in marker_sets:
s=new_marker_set('particle_346 geometry')
marker_sets["particle_346 geometry"]=s
s= marker_sets["particle_346 geometry"]
mark=s.place_marker((4451.67, 1878.25, 4007.94), (0.7, 0.7, 0.7), 187.262)
if "particle_347 geometry" not in marker_sets:
s=new_marker_set('particle_347 geometry')
marker_sets["particle_347 geometry"]=s
s= marker_sets["particle_347 geometry"]
mark=s.place_marker((3794.94, 1826, 4019.31), (0.7, 0.7, 0.7), 164.335)
if "particle_348 geometry" not in marker_sets:
s=new_marker_set('particle_348 geometry')
marker_sets["particle_348 geometry"]=s
s= marker_sets["particle_348 geometry"]
mark=s.place_marker((3474.61, 1403.99, 3967.78), (0.7, 0.7, 0.7), 138.363)
if "particle_349 geometry" not in marker_sets:
s=new_marker_set('particle_349 geometry')
marker_sets["particle_349 geometry"]=s
s= marker_sets["particle_349 geometry"]
mark=s.place_marker((3212.17, 1125.66, 4074.27), (0.7, 0.7, 0.7), 138.49)
if "particle_350 geometry" not in marker_sets:
s=new_marker_set('particle_350 geometry')
marker_sets["particle_350 geometry"]=s
s= marker_sets["particle_350 geometry"]
mark=s.place_marker((2968.43, 1367.03, 4137.98), (0.7, 0.7, 0.7), 116.325)
if "particle_351 geometry" not in marker_sets:
s=new_marker_set('particle_351 geometry')
marker_sets["particle_351 geometry"]=s
s= marker_sets["particle_351 geometry"]
mark=s.place_marker((3212.15, 1706.54, 3961.1), (0.7, 0.7, 0.7), 106.511)
if "particle_352 geometry" not in marker_sets:
s=new_marker_set('particle_352 geometry')
marker_sets["particle_352 geometry"]=s
s= marker_sets["particle_352 geometry"]
mark=s.place_marker((3652.52, 1853.16, 3662.66), (0.7, 0.7, 0.7), 151.096)
if "particle_353 geometry" not in marker_sets:
s=new_marker_set('particle_353 geometry')
marker_sets["particle_353 geometry"]=s
s= marker_sets["particle_353 geometry"]
mark=s.place_marker((4290.51, 1896.98, 3439.89), (0.7, 0.7, 0.7), 240.856)
if "particle_354 geometry" not in marker_sets:
s=new_marker_set('particle_354 geometry')
marker_sets["particle_354 geometry"]=s
s= marker_sets["particle_354 geometry"]
mark=s.place_marker((4781.05, 1830.84, 3273.57), (0.7, 0.7, 0.7), 149.7)
if "particle_355 geometry" not in marker_sets:
s=new_marker_set('particle_355 geometry')
marker_sets["particle_355 geometry"]=s
s= marker_sets["particle_355 geometry"]
mark=s.place_marker((4956.14, 1911.11, 2986.89), (0.7, 0.7, 0.7), 165.943)
if "particle_356 geometry" not in marker_sets:
s=new_marker_set('particle_356 geometry')
marker_sets["particle_356 geometry"]=s
s= marker_sets["particle_356 geometry"]
mark=s.place_marker((4577.39, 1875.47, 2505.88), (0.7, 0.7, 0.7), 178.971)
if "particle_357 geometry" not in marker_sets:
s=new_marker_set('particle_357 geometry')
marker_sets["particle_357 geometry"]=s
s= marker_sets["particle_357 geometry"]
mark=s.place_marker((4299.37, 2028.87, 1826.51), (0.7, 0.7, 0.7), 154.945)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
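# Note: the blocks above are machine-generated and all follow one pattern.
# A minimal loop-based sketch of that pattern (assuming a hypothetical
# `particles` list of (name, xyz, color, radius) tuples; not part of the
# generated file) would be:
#
#   for name, xyz, color, radius in particles:
#       if name not in marker_sets:
#           marker_sets[name] = new_marker_set(name)
#       marker_sets[name].place_marker(xyz, color, radius)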
| gpl-3.0 |
xzturn/tensorflow | tensorflow/python/framework/kernels.py | 18 | 1709 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions for querying registered kernels."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.framework import kernel_def_pb2
from tensorflow.python.client import pywrap_tf_session as c_api
from tensorflow.python.util import compat
def get_all_registered_kernels():
"""Returns a KernelList proto of all registered kernels.
"""
buf = c_api.TF_GetAllRegisteredKernels()
data = c_api.TF_GetBuffer(buf)
kernel_list = kernel_def_pb2.KernelList()
kernel_list.ParseFromString(compat.as_bytes(data))
return kernel_list
def get_registered_kernels_for_op(name):
"""Returns a KernelList proto of registered kernels for a given op.
Args:
name: A string representing the name of the op whose kernels to retrieve.
"""
buf = c_api.TF_GetRegisteredKernelsForOp(name)
data = c_api.TF_GetBuffer(buf)
kernel_list = kernel_def_pb2.KernelList()
kernel_list.ParseFromString(compat.as_bytes(data))
return kernel_list
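# Usage sketch (hedged: assumes a TensorFlow build where this module is
# importable as tensorflow.python.framework.kernels; "MatMul" is only an
# example op name):
#
#   from tensorflow.python.framework import kernels
#   kernel_list = kernels.get_registered_kernels_for_op("MatMul")
#   print([k.device_type for k in kernel_list.kernel])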
| apache-2.0 |
ryanahall/django | django/db/backends/mysql/schema.py | 18 | 2871 | from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.models import NOT_PROVIDED
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"
sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
sql_alter_column_type = "MODIFY %(column)s %(type)s"
sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"
sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY "
"(%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
)
sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
def quote_value(self, value):
# Inner import to allow module to fail to load gracefully
import MySQLdb.converters
return MySQLdb.escape(value, MySQLdb.converters.conversions)
def skip_default(self, field):
"""
MySQL doesn't accept default values for longtext and longblob
and implicitly treats these columns as nullable.
"""
return field.db_type(self.connection) in {'longtext', 'longblob'}
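    # Illustration (hypothetical field, not from this module): a TextField
    # maps to "longtext" on MySQL, so skip_default() returns True and any
    # default is left out of the DDL -- add_field() below back-fills rows
    # with a one-off UPDATE instead.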
def add_field(self, model, field):
super(DatabaseSchemaEditor, self).add_field(model, field)
# Simulate the effect of a one-off default.
if self.skip_default(field) and field.default not in {None, NOT_PROVIDED}:
effective_default = self.effective_default(field)
self.execute('UPDATE %(table)s SET %(column)s = %%s' % {
'table': self.quote_name(model._meta.db_table),
'column': self.quote_name(field.column),
}, [effective_default])
def _model_indexes_sql(self, model):
storage = self.connection.introspection.get_storage_engine(
self.connection.cursor(), model._meta.db_table
)
if storage == "InnoDB":
for field in model._meta.local_fields:
if field.db_index and not field.unique and field.get_internal_type() == "ForeignKey":
# Temporary setting db_index to False (in memory) to disable
# index creation for FKs (index automatically created by MySQL)
field.db_index = False
return super(DatabaseSchemaEditor, self)._model_indexes_sql(model)
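    # Rationale for the loop above: InnoDB automatically creates an index for
    # every FOREIGN KEY constraint, so emitting a second explicit index for a
    # ForeignKey column would be redundant.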
| bsd-3-clause |
alfredodeza/boto | boto/exception.py | 117 | 17106 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Exception classes - Subclassing allows you to check for specific errors
"""
import base64
import xml.sax
import boto
from boto import handler
from boto.compat import json, StandardError
from boto.resultset import ResultSet
class BotoClientError(StandardError):
"""
General Boto Client error (error accessing AWS)
"""
def __init__(self, reason, *args):
super(BotoClientError, self).__init__(reason, *args)
self.reason = reason
def __repr__(self):
return 'BotoClientError: %s' % self.reason
def __str__(self):
return 'BotoClientError: %s' % self.reason
class SDBPersistenceError(StandardError):
pass
class StoragePermissionsError(BotoClientError):
"""
Permissions error when accessing a bucket or key on a storage service.
"""
pass
class S3PermissionsError(StoragePermissionsError):
"""
Permissions error when accessing a bucket or key on S3.
"""
pass
class GSPermissionsError(StoragePermissionsError):
"""
Permissions error when accessing a bucket or key on GS.
"""
pass
class BotoServerError(StandardError):
def __init__(self, status, reason, body=None, *args):
super(BotoServerError, self).__init__(status, reason, body, *args)
self.status = status
self.reason = reason
self.body = body or ''
self.request_id = None
self.error_code = None
self._error_message = None
self.message = ''
self.box_usage = None
if isinstance(self.body, bytes):
try:
self.body = self.body.decode('utf-8')
except UnicodeDecodeError:
boto.log.debug('Unable to decode body from bytes!')
# Attempt to parse the error response. If body isn't present,
# then just ignore the error response.
if self.body:
# Check if it looks like a ``dict``.
if hasattr(self.body, 'items'):
# It's not a string, so trying to parse it will fail.
# But since it's data, we can work with that.
self.request_id = self.body.get('RequestId', None)
if 'Error' in self.body:
# XML-style
error = self.body.get('Error', {})
self.error_code = error.get('Code', None)
self.message = error.get('Message', None)
else:
# JSON-style.
self.message = self.body.get('message', None)
else:
try:
h = handler.XmlHandlerWrapper(self, self)
h.parseString(self.body)
except (TypeError, xml.sax.SAXParseException):
# What if it's JSON? Let's try that.
try:
parsed = json.loads(self.body)
if 'RequestId' in parsed:
self.request_id = parsed['RequestId']
if 'Error' in parsed:
if 'Code' in parsed['Error']:
self.error_code = parsed['Error']['Code']
if 'Message' in parsed['Error']:
self.message = parsed['Error']['Message']
except (TypeError, ValueError):
# Remove unparsable message body so we don't include garbage
# in exception. But first, save self.body in self.error_message
# because occasionally we get error messages from Eucalyptus
# that are just text strings that we want to preserve.
self.message = self.body
self.body = None
def __getattr__(self, name):
if name == 'error_message':
return self.message
if name == 'code':
return self.error_code
raise AttributeError
def __setattr__(self, name, value):
if name == 'error_message':
self.message = value
else:
super(BotoServerError, self).__setattr__(name, value)
def __repr__(self):
return '%s: %s %s\n%s' % (self.__class__.__name__,
self.status, self.reason, self.body)
def __str__(self):
return '%s: %s %s\n%s' % (self.__class__.__name__,
self.status, self.reason, self.body)
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name in ('RequestId', 'RequestID'):
self.request_id = value
elif name == 'Code':
self.error_code = value
elif name == 'Message':
self.message = value
elif name == 'BoxUsage':
self.box_usage = value
return None
def _cleanupParsedProperties(self):
self.request_id = None
self.error_code = None
self.message = None
self.box_usage = None
class ConsoleOutput(object):
def __init__(self, parent=None):
self.parent = parent
self.instance_id = None
self.timestamp = None
self.comment = None
self.output = None
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'instanceId':
self.instance_id = value
elif name == 'output':
self.output = base64.b64decode(value)
else:
setattr(self, name, value)
class StorageCreateError(BotoServerError):
"""
Error creating a bucket or key on a storage service.
"""
def __init__(self, status, reason, body=None):
self.bucket = None
super(StorageCreateError, self).__init__(status, reason, body)
def endElement(self, name, value, connection):
if name == 'BucketName':
self.bucket = value
else:
return super(StorageCreateError, self).endElement(name, value, connection)
class S3CreateError(StorageCreateError):
"""
Error creating a bucket or key on S3.
"""
pass
class GSCreateError(StorageCreateError):
"""
Error creating a bucket or key on GS.
"""
pass
class StorageCopyError(BotoServerError):
"""
Error copying a key on a storage service.
"""
pass
class S3CopyError(StorageCopyError):
"""
Error copying a key on S3.
"""
pass
class GSCopyError(StorageCopyError):
"""
Error copying a key on GS.
"""
pass
class SQSError(BotoServerError):
"""
General Error on Simple Queue Service.
"""
def __init__(self, status, reason, body=None):
self.detail = None
self.type = None
super(SQSError, self).__init__(status, reason, body)
def startElement(self, name, attrs, connection):
return super(SQSError, self).startElement(name, attrs, connection)
def endElement(self, name, value, connection):
if name == 'Detail':
self.detail = value
elif name == 'Type':
self.type = value
else:
return super(SQSError, self).endElement(name, value, connection)
def _cleanupParsedProperties(self):
super(SQSError, self)._cleanupParsedProperties()
for p in ('detail', 'type'):
setattr(self, p, None)
class SQSDecodeError(BotoClientError):
"""
Error when decoding an SQS message.
"""
def __init__(self, reason, message):
super(SQSDecodeError, self).__init__(reason, message)
self.message = message
def __repr__(self):
return 'SQSDecodeError: %s' % self.reason
def __str__(self):
return 'SQSDecodeError: %s' % self.reason
class StorageResponseError(BotoServerError):
"""
Error in response from a storage service.
"""
def __init__(self, status, reason, body=None):
self.resource = None
super(StorageResponseError, self).__init__(status, reason, body)
def startElement(self, name, attrs, connection):
return super(StorageResponseError, self).startElement(
name, attrs, connection)
def endElement(self, name, value, connection):
if name == 'Resource':
self.resource = value
else:
return super(StorageResponseError, self).endElement(
name, value, connection)
def _cleanupParsedProperties(self):
super(StorageResponseError, self)._cleanupParsedProperties()
        for p in ('resource',):  # note the comma: ('resource') is just a string
setattr(self, p, None)
class S3ResponseError(StorageResponseError):
"""
Error in response from S3.
"""
pass
class GSResponseError(StorageResponseError):
"""
Error in response from GS.
"""
pass
class EC2ResponseError(BotoServerError):
"""
Error in response from EC2.
"""
def __init__(self, status, reason, body=None):
self.errors = None
self._errorResultSet = []
super(EC2ResponseError, self).__init__(status, reason, body)
self.errors = [
(e.error_code, e.error_message) for e in self._errorResultSet]
if len(self.errors):
self.error_code, self.error_message = self.errors[0]
def startElement(self, name, attrs, connection):
if name == 'Errors':
self._errorResultSet = ResultSet([('Error', _EC2Error)])
return self._errorResultSet
else:
return None
def endElement(self, name, value, connection):
if name == 'RequestID':
self.request_id = value
else:
return None # don't call subclass here
def _cleanupParsedProperties(self):
super(EC2ResponseError, self)._cleanupParsedProperties()
self._errorResultSet = []
        for p in ('errors',):  # note the comma: ('errors') is just a string
setattr(self, p, None)
class JSONResponseError(BotoServerError):
"""
This exception expects the fully parsed and decoded JSON response
body to be passed as the body parameter.
:ivar status: The HTTP status code.
:ivar reason: The HTTP reason message.
:ivar body: The Python dict that represents the decoded JSON
response body.
:ivar error_message: The full description of the AWS error encountered.
:ivar error_code: A short string that identifies the AWS error
(e.g. ConditionalCheckFailedException)
"""
def __init__(self, status, reason, body=None, *args):
self.status = status
self.reason = reason
self.body = body
if self.body:
self.error_message = self.body.get('message', None)
self.error_code = self.body.get('__type', None)
if self.error_code:
self.error_code = self.error_code.split('#')[-1]
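# Example (hypothetical decoded body, for illustration only):
#   body = {'__type': 'com.amazonaws.dynamodb#ConditionalCheckFailedException',
#           'message': 'The conditional request failed'}
#   e = JSONResponseError(400, 'Bad Request', body=body)
#   e.error_code  # -> 'ConditionalCheckFailedException' (text after the '#')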
class DynamoDBResponseError(JSONResponseError):
pass
class SWFResponseError(JSONResponseError):
pass
class EmrResponseError(BotoServerError):
"""
Error in response from EMR
"""
pass
class _EC2Error(object):
def __init__(self, connection=None):
self.connection = connection
self.error_code = None
self.error_message = None
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'Code':
self.error_code = value
elif name == 'Message':
self.error_message = value
else:
return None
class SDBResponseError(BotoServerError):
"""
Error in responses from SDB.
"""
pass
class AWSConnectionError(BotoClientError):
"""
General error connecting to Amazon Web Services.
"""
pass
class StorageDataError(BotoClientError):
"""
Error receiving data from a storage service.
"""
pass
class S3DataError(StorageDataError):
"""
Error receiving data from S3.
"""
pass
class GSDataError(StorageDataError):
"""
Error receiving data from GS.
"""
pass
class InvalidUriError(Exception):
"""Exception raised when URI is invalid."""
def __init__(self, message):
super(InvalidUriError, self).__init__(message)
self.message = message
class InvalidAclError(Exception):
"""Exception raised when ACL XML is invalid."""
def __init__(self, message):
super(InvalidAclError, self).__init__(message)
self.message = message
class InvalidCorsError(Exception):
"""Exception raised when CORS XML is invalid."""
def __init__(self, message):
super(InvalidCorsError, self).__init__(message)
self.message = message
class NoAuthHandlerFound(Exception):
"""Is raised when no auth handlers were found ready to authenticate."""
pass
class InvalidLifecycleConfigError(Exception):
"""Exception raised when GCS lifecycle configuration XML is invalid."""
def __init__(self, message):
super(InvalidLifecycleConfigError, self).__init__(message)
self.message = message
# Enum class for resumable upload failure disposition.
class ResumableTransferDisposition(object):
# START_OVER means an attempt to resume an existing transfer failed,
# and a new resumable upload should be attempted (without delay).
START_OVER = 'START_OVER'
# WAIT_BEFORE_RETRY means the resumable transfer failed but that it can
# be retried after a time delay within the current process.
WAIT_BEFORE_RETRY = 'WAIT_BEFORE_RETRY'
# ABORT_CUR_PROCESS means the resumable transfer failed and that
# delaying/retrying within the current process will not help. If
# resumable transfer included a state tracker file the upload can be
# retried again later, in another process (e.g., a later run of gsutil).
ABORT_CUR_PROCESS = 'ABORT_CUR_PROCESS'
# ABORT means the resumable transfer failed in a way that it does not
# make sense to continue in the current process, and further that the
# current tracker ID should not be preserved (in a tracker file if one
# was specified at resumable upload start time). If the user tries again
# later (e.g., a separate run of gsutil) it will get a new resumable
# upload ID.
ABORT = 'ABORT'
class ResumableUploadException(Exception):
"""
Exception raised for various resumable upload problems.
self.disposition is of type ResumableTransferDisposition.
"""
def __init__(self, message, disposition):
super(ResumableUploadException, self).__init__(message, disposition)
self.message = message
self.disposition = disposition
def __repr__(self):
return 'ResumableUploadException("%s", %s)' % (
self.message, self.disposition)
class ResumableDownloadException(Exception):
"""
Exception raised for various resumable download problems.
self.disposition is of type ResumableTransferDisposition.
"""
def __init__(self, message, disposition):
super(ResumableDownloadException, self).__init__(message, disposition)
self.message = message
self.disposition = disposition
def __repr__(self):
return 'ResumableDownloadException("%s", %s)' % (
self.message, self.disposition)
class TooManyRecordsException(Exception):
"""
Exception raised when a search of Route53 records returns more
records than requested.
"""
def __init__(self, message):
super(TooManyRecordsException, self).__init__(message)
self.message = message
class PleaseRetryException(Exception):
"""
Indicates a request should be retried.
"""
def __init__(self, message, response=None):
self.message = message
self.response = response
def __repr__(self):
return 'PleaseRetryException("%s", %s)' % (
self.message,
self.response
)
| mit |
ttsubo/ryu | ryu/lib/ofctl_v1_0.py | 1 | 18053 | # Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
import socket
import logging
from ryu.ofproto import ofproto_v1_0
from ryu.lib import hub
from ryu.lib.mac import haddr_to_bin, haddr_to_str
LOG = logging.getLogger('ryu.lib.ofctl_v1_0')
DEFAULT_TIMEOUT = 1.0 # TODO:XXX
def to_actions(dp, acts):
actions = []
for a in acts:
action_type = a.get('type')
if action_type == 'OUTPUT':
port = int(a.get('port', ofproto_v1_0.OFPP_NONE))
# NOTE: The reason of this magic number (0xffe5)
# is because there is no good constant in of1.0.
# The same value as OFPCML_MAX of of1.2 and of1.3 is used.
max_len = int(a.get('max_len', 0xffe5))
actions.append(dp.ofproto_parser.OFPActionOutput(port, max_len))
elif action_type == 'SET_VLAN_VID':
vlan_vid = int(a.get('vlan_vid', 0xffff))
actions.append(dp.ofproto_parser.OFPActionVlanVid(vlan_vid))
elif action_type == 'SET_VLAN_PCP':
vlan_pcp = int(a.get('vlan_pcp', 0))
actions.append(dp.ofproto_parser.OFPActionVlanPcp(vlan_pcp))
elif action_type == 'STRIP_VLAN':
actions.append(dp.ofproto_parser.OFPActionStripVlan())
elif action_type == 'SET_DL_SRC':
dl_src = haddr_to_bin(a.get('dl_src'))
actions.append(dp.ofproto_parser.OFPActionSetDlSrc(dl_src))
elif action_type == 'SET_DL_DST':
dl_dst = haddr_to_bin(a.get('dl_dst'))
actions.append(dp.ofproto_parser.OFPActionSetDlDst(dl_dst))
elif action_type == 'SET_NW_SRC':
nw_src = ipv4_to_int(a.get('nw_src'))
actions.append(dp.ofproto_parser.OFPActionSetNwSrc(nw_src))
elif action_type == 'SET_NW_DST':
nw_dst = ipv4_to_int(a.get('nw_dst'))
actions.append(dp.ofproto_parser.OFPActionSetNwDst(nw_dst))
elif action_type == 'SET_NW_TOS':
nw_tos = int(a.get('nw_tos', 0))
actions.append(dp.ofproto_parser.OFPActionSetNwTos(nw_tos))
elif action_type == 'SET_TP_SRC':
tp_src = int(a.get('tp_src', 0))
actions.append(dp.ofproto_parser.OFPActionSetTpSrc(tp_src))
elif action_type == 'SET_TP_DST':
tp_dst = int(a.get('tp_dst', 0))
actions.append(dp.ofproto_parser.OFPActionSetTpDst(tp_dst))
elif action_type == 'ENQUEUE':
port = int(a.get('port', ofproto_v1_0.OFPP_NONE))
queue_id = int(a.get('queue_id', 0))
actions.append(dp.ofproto_parser.OFPActionEnqueue(port, queue_id))
else:
LOG.error('Unknown action type')
return actions
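# Usage sketch (REST-style dicts as this module expects; 'dp' is a connected
# Datapath and the VLAN/port numbers are made up for illustration):
#   acts = [{'type': 'SET_VLAN_VID', 'vlan_vid': 100},
#           {'type': 'OUTPUT', 'port': 2}]
#   actions = to_actions(dp, acts)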
def actions_to_str(acts):
actions = []
for a in acts:
action_type = a.cls_action_type
if action_type == ofproto_v1_0.OFPAT_OUTPUT:
buf = 'OUTPUT:' + str(a.port)
elif action_type == ofproto_v1_0.OFPAT_SET_VLAN_VID:
buf = 'SET_VLAN_VID:' + str(a.vlan_vid)
elif action_type == ofproto_v1_0.OFPAT_SET_VLAN_PCP:
buf = 'SET_VLAN_PCP:' + str(a.vlan_pcp)
elif action_type == ofproto_v1_0.OFPAT_STRIP_VLAN:
buf = 'STRIP_VLAN'
elif action_type == ofproto_v1_0.OFPAT_SET_DL_SRC:
buf = 'SET_DL_SRC:' + haddr_to_str(a.dl_addr)
elif action_type == ofproto_v1_0.OFPAT_SET_DL_DST:
buf = 'SET_DL_DST:' + haddr_to_str(a.dl_addr)
elif action_type == ofproto_v1_0.OFPAT_SET_NW_SRC:
buf = 'SET_NW_SRC:' + \
socket.inet_ntoa(struct.pack('!I', a.nw_addr))
elif action_type == ofproto_v1_0.OFPAT_SET_NW_DST:
buf = 'SET_NW_DST:' + \
socket.inet_ntoa(struct.pack('!I', a.nw_addr))
elif action_type == ofproto_v1_0.OFPAT_SET_NW_TOS:
buf = 'SET_NW_TOS:' + str(a.tos)
elif action_type == ofproto_v1_0.OFPAT_SET_TP_SRC:
buf = 'SET_TP_SRC:' + str(a.tp)
elif action_type == ofproto_v1_0.OFPAT_SET_TP_DST:
buf = 'SET_TP_DST:' + str(a.tp)
elif action_type == ofproto_v1_0.OFPAT_ENQUEUE:
buf = 'ENQUEUE:' + str(a.port) + ":" + str(a.queue_id)
elif action_type == ofproto_v1_0.OFPAT_VENDOR:
buf = 'VENDOR'
else:
buf = 'UNKNOWN'
actions.append(buf)
return actions
def ipv4_to_int(addr):
ip = addr.split('.')
assert len(ip) == 4
i = 0
for b in ip:
b = int(b)
i = (i << 8) | b
return i
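# e.g. ipv4_to_int('10.0.0.1') == 0x0a000001 == 167772161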
def to_match(dp, attrs):
ofp = dp.ofproto
wildcards = ofp.OFPFW_ALL
in_port = 0
dl_src = 0
dl_dst = 0
dl_vlan = 0
dl_vlan_pcp = 0
dl_type = 0
nw_tos = 0
nw_proto = 0
nw_src = 0
nw_dst = 0
tp_src = 0
tp_dst = 0
for key, value in attrs.items():
if key == 'in_port':
in_port = int(value)
wildcards &= ~ofp.OFPFW_IN_PORT
elif key == 'dl_src':
dl_src = haddr_to_bin(value)
wildcards &= ~ofp.OFPFW_DL_SRC
elif key == 'dl_dst':
dl_dst = haddr_to_bin(value)
wildcards &= ~ofp.OFPFW_DL_DST
elif key == 'dl_vlan':
dl_vlan = int(value)
wildcards &= ~ofp.OFPFW_DL_VLAN
elif key == 'dl_vlan_pcp':
dl_vlan_pcp = int(value)
wildcards &= ~ofp.OFPFW_DL_VLAN_PCP
elif key == 'dl_type':
dl_type = int(value)
wildcards &= ~ofp.OFPFW_DL_TYPE
elif key == 'nw_tos':
nw_tos = int(value)
wildcards &= ~ofp.OFPFW_NW_TOS
elif key == 'nw_proto':
nw_proto = int(value)
wildcards &= ~ofp.OFPFW_NW_PROTO
elif key == 'nw_src':
ip = value.split('/')
nw_src = struct.unpack('!I', socket.inet_aton(ip[0]))[0]
mask = 32
if len(ip) == 2:
mask = int(ip[1])
assert 0 < mask <= 32
v = (32 - mask) << ofp.OFPFW_NW_SRC_SHIFT | \
~ofp.OFPFW_NW_SRC_MASK
wildcards &= v
elif key == 'nw_dst':
ip = value.split('/')
nw_dst = struct.unpack('!I', socket.inet_aton(ip[0]))[0]
mask = 32
if len(ip) == 2:
mask = int(ip[1])
assert 0 < mask <= 32
v = (32 - mask) << ofp.OFPFW_NW_DST_SHIFT | \
~ofp.OFPFW_NW_DST_MASK
wildcards &= v
elif key == 'tp_src':
tp_src = int(value)
wildcards &= ~ofp.OFPFW_TP_SRC
elif key == 'tp_dst':
tp_dst = int(value)
wildcards &= ~ofp.OFPFW_TP_DST
else:
LOG.error("unknown match name %s, %s, %d", key, value, len(key))
match = dp.ofproto_parser.OFPMatch(
wildcards, in_port, dl_src, dl_dst, dl_vlan, dl_vlan_pcp,
dl_type, nw_tos, nw_proto, nw_src, nw_dst, tp_src, tp_dst)
return match
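# Usage sketch (values are illustrative): match IPv4 TCP traffic from
# 10.0.0.0/8:
#   match = to_match(dp, {'dl_type': 0x800, 'nw_proto': 6,
#                         'nw_src': '10.0.0.0/8'})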
def match_to_str(m):
match = {}
if ~m.wildcards & ofproto_v1_0.OFPFW_IN_PORT:
match['in_port'] = m.in_port
if ~m.wildcards & ofproto_v1_0.OFPFW_DL_SRC:
match['dl_src'] = haddr_to_str(m.dl_src)
if ~m.wildcards & ofproto_v1_0.OFPFW_DL_DST:
match['dl_dst'] = haddr_to_str(m.dl_dst)
if ~m.wildcards & ofproto_v1_0.OFPFW_DL_VLAN:
match['dl_vlan'] = m.dl_vlan
if ~m.wildcards & ofproto_v1_0.OFPFW_DL_VLAN_PCP:
match['dl_vlan_pcp'] = m.dl_vlan_pcp
if ~m.wildcards & ofproto_v1_0.OFPFW_DL_TYPE:
match['dl_type'] = m.dl_type
if ~m.wildcards & ofproto_v1_0.OFPFW_NW_TOS:
match['nw_tos'] = m.nw_tos
if ~m.wildcards & ofproto_v1_0.OFPFW_NW_PROTO:
match['nw_proto'] = m.nw_proto
if ~m.wildcards & ofproto_v1_0.OFPFW_NW_SRC_ALL:
match['nw_src'] = nw_src_to_str(m.wildcards, m.nw_src)
if ~m.wildcards & ofproto_v1_0.OFPFW_NW_DST_ALL:
match['nw_dst'] = nw_dst_to_str(m.wildcards, m.nw_dst)
if ~m.wildcards & ofproto_v1_0.OFPFW_TP_SRC:
match['tp_src'] = m.tp_src
if ~m.wildcards & ofproto_v1_0.OFPFW_TP_DST:
match['tp_dst'] = m.tp_dst
return match
def nw_src_to_str(wildcards, addr):
ip = socket.inet_ntoa(struct.pack('!I', addr))
mask = 32 - ((wildcards & ofproto_v1_0.OFPFW_NW_SRC_MASK)
>> ofproto_v1_0.OFPFW_NW_SRC_SHIFT)
if mask == 32:
mask = 0
if mask:
ip += '/%d' % mask
return ip
def nw_dst_to_str(wildcards, addr):
ip = socket.inet_ntoa(struct.pack('!I', addr))
mask = 32 - ((wildcards & ofproto_v1_0.OFPFW_NW_DST_MASK)
>> ofproto_v1_0.OFPFW_NW_DST_SHIFT)
if mask == 32:
mask = 0
if mask:
ip += '/%d' % mask
return ip
def send_stats_request(dp, stats, waiters, msgs):
dp.set_xid(stats)
waiters_per_dp = waiters.setdefault(dp.id, {})
lock = hub.Event()
previous_msg_len = len(msgs)
waiters_per_dp[stats.xid] = (lock, msgs)
dp.send_msg(stats)
lock.wait(timeout=DEFAULT_TIMEOUT)
current_msg_len = len(msgs)
while current_msg_len > previous_msg_len:
previous_msg_len = current_msg_len
lock.wait(timeout=DEFAULT_TIMEOUT)
current_msg_len = len(msgs)
if not lock.is_set():
del waiters_per_dp[stats.xid]
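# Note: send_stats_request() parks the caller on a hub.Event keyed by
# (dp.id, xid); the stats-reply handler that owns `waiters` is expected to
# append reply messages to `msgs` and set the event. The loop above keeps
# waiting as long as new reply parts arrive within DEFAULT_TIMEOUT.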
def get_desc_stats(dp, waiters):
stats = dp.ofproto_parser.OFPDescStatsRequest(dp, 0)
msgs = []
send_stats_request(dp, stats, waiters, msgs)
for msg in msgs:
stats = msg.body
s = {'mfr_desc': stats.mfr_desc,
'hw_desc': stats.hw_desc,
'sw_desc': stats.sw_desc,
'serial_num': stats.serial_num,
'dp_desc': stats.dp_desc}
desc = {str(dp.id): s}
return desc
def get_queue_stats(dp, waiters):
stats = dp.ofproto_parser.OFPQueueStatsRequest(dp, 0, dp.ofproto.OFPP_ALL,
dp.ofproto.OFPQ_ALL)
msgs = []
send_stats_request(dp, stats, waiters, msgs)
s = []
for msg in msgs:
stats = msg.body
for stat in stats:
s.append({'port_no': stat.port_no,
'queue_id': stat.queue_id,
'tx_bytes': stat.tx_bytes,
'tx_errors': stat.tx_errors,
'tx_packets': stat.tx_packets})
desc = {str(dp.id): s}
return desc
def get_flow_stats(dp, waiters, flow=None):
    flow = flow or {}  # avoid a shared mutable default argument
match = to_match(dp, flow.get('match', {}))
table_id = int(flow.get('table_id', 0xff))
out_port = int(flow.get('out_port', dp.ofproto.OFPP_NONE))
stats = dp.ofproto_parser.OFPFlowStatsRequest(
dp, 0, match, table_id, out_port)
msgs = []
send_stats_request(dp, stats, waiters, msgs)
flows = []
for msg in msgs:
for stats in msg.body:
actions = actions_to_str(stats.actions)
match = match_to_str(stats.match)
s = {'priority': stats.priority,
'cookie': stats.cookie,
'idle_timeout': stats.idle_timeout,
'hard_timeout': stats.hard_timeout,
'actions': actions,
'match': match,
'byte_count': stats.byte_count,
'duration_sec': stats.duration_sec,
'duration_nsec': stats.duration_nsec,
'packet_count': stats.packet_count,
'table_id': stats.table_id}
flows.append(s)
flows = {str(dp.id): flows}
return flows
def get_aggregate_flow_stats(dp, waiters, flow=None):
    flow = flow or {}  # avoid a shared mutable default argument
match = to_match(dp, flow.get('match', {}))
table_id = int(flow.get('table_id', 0xff))
out_port = int(flow.get('out_port', dp.ofproto.OFPP_NONE))
stats = dp.ofproto_parser.OFPAggregateStatsRequest(
dp, 0, match, table_id, out_port)
msgs = []
send_stats_request(dp, stats, waiters, msgs)
flows = []
for msg in msgs:
stats = msg.body
for st in stats:
s = {'packet_count': st.packet_count,
'byte_count': st.byte_count,
'flow_count': st.flow_count}
flows.append(s)
flows = {str(dp.id): flows}
return flows
def get_table_stats(dp, waiters):
stats = dp.ofproto_parser.OFPTableStatsRequest(dp, 0)
ofp = dp.ofproto
msgs = []
send_stats_request(dp, stats, waiters, msgs)
match_convert = {ofp.OFPFW_IN_PORT: 'IN_PORT',
ofp.OFPFW_DL_VLAN: 'DL_VLAN',
ofp.OFPFW_DL_SRC: 'DL_SRC',
ofp.OFPFW_DL_DST: 'DL_DST',
ofp.OFPFW_DL_TYPE: 'DL_TYPE',
ofp.OFPFW_NW_PROTO: 'NW_PROTO',
ofp.OFPFW_TP_SRC: 'TP_SRC',
ofp.OFPFW_TP_DST: 'TP_DST',
ofp.OFPFW_NW_SRC_SHIFT: 'NW_SRC_SHIFT',
ofp.OFPFW_NW_SRC_BITS: 'NW_SRC_BITS',
ofp.OFPFW_NW_SRC_MASK: 'NW_SRC_MASK',
ofp.OFPFW_NW_SRC: 'NW_SRC',
ofp.OFPFW_NW_SRC_ALL: 'NW_SRC_ALL',
ofp.OFPFW_NW_DST_SHIFT: 'NW_DST_SHIFT',
ofp.OFPFW_NW_DST_BITS: 'NW_DST_BITS',
ofp.OFPFW_NW_DST_MASK: 'NW_DST_MASK',
ofp.OFPFW_NW_DST: 'NW_DST',
ofp.OFPFW_NW_DST_ALL: 'NW_DST_ALL',
ofp.OFPFW_DL_VLAN_PCP: 'DL_VLAN_PCP',
ofp.OFPFW_NW_TOS: 'NW_TOS',
ofp.OFPFW_ALL: 'ALL',
ofp.OFPFW_ICMP_TYPE: 'ICMP_TYPE',
ofp.OFPFW_ICMP_CODE: 'ICMP_CODE'}
tables = []
for msg in msgs:
stats = msg.body
for stat in stats:
wildcards = []
for k, v in match_convert.items():
if (1 << k) & stat.wildcards:
wildcards.append(v)
s = {'table_id': stat.table_id,
'name': stat.name,
'wildcards': wildcards,
'max_entries': stat.max_entries,
'active_count': stat.active_count,
'lookup_count': stat.lookup_count,
'matched_count': stat.matched_count}
tables.append(s)
desc = {str(dp.id): tables}
return desc
def get_port_stats(dp, waiters):
stats = dp.ofproto_parser.OFPPortStatsRequest(
dp, 0, dp.ofproto.OFPP_NONE)
msgs = []
send_stats_request(dp, stats, waiters, msgs)
ports = []
for msg in msgs:
for stats in msg.body:
s = {'port_no': stats.port_no,
'rx_packets': stats.rx_packets,
'tx_packets': stats.tx_packets,
'rx_bytes': stats.rx_bytes,
'tx_bytes': stats.tx_bytes,
'rx_dropped': stats.rx_dropped,
'tx_dropped': stats.tx_dropped,
'rx_errors': stats.rx_errors,
'tx_errors': stats.tx_errors,
'rx_frame_err': stats.rx_frame_err,
'rx_over_err': stats.rx_over_err,
'rx_crc_err': stats.rx_crc_err,
'collisions': stats.collisions}
ports.append(s)
ports = {str(dp.id): ports}
return ports
def get_port_desc(dp, waiters):
stats = dp.ofproto_parser.OFPFeaturesRequest(dp)
msgs = []
send_stats_request(dp, stats, waiters, msgs)
descs = []
for msg in msgs:
stats = msg.ports
for stat in stats.values():
d = {'port_no': stat.port_no,
'hw_addr': stat.hw_addr,
'name': stat.name,
'config': stat.config,
'state': stat.state,
'curr': stat.curr,
'advertised': stat.advertised,
'supported': stat.supported,
'peer': stat.peer}
descs.append(d)
descs = {str(dp.id): descs}
return descs
def mod_flow_entry(dp, flow, cmd):
cookie = int(flow.get('cookie', 0))
priority = int(flow.get('priority',
dp.ofproto.OFP_DEFAULT_PRIORITY))
buffer_id = int(flow.get('buffer_id', dp.ofproto.OFP_NO_BUFFER))
out_port = int(flow.get('out_port', dp.ofproto.OFPP_NONE))
flags = int(flow.get('flags', 0))
idle_timeout = int(flow.get('idle_timeout', 0))
hard_timeout = int(flow.get('hard_timeout', 0))
actions = to_actions(dp, flow.get('actions', []))
match = to_match(dp, flow.get('match', {}))
flow_mod = dp.ofproto_parser.OFPFlowMod(
datapath=dp, match=match, cookie=cookie,
command=cmd, idle_timeout=idle_timeout,
hard_timeout=hard_timeout, priority=priority,
buffer_id=buffer_id, out_port=out_port,
flags=flags,
actions=actions)
dp.send_msg(flow_mod)
def delete_flow_entry(dp):
match = dp.ofproto_parser.OFPMatch(
dp.ofproto.OFPFW_ALL, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
flow_mod = dp.ofproto_parser.OFPFlowMod(
datapath=dp, match=match, cookie=0,
command=dp.ofproto.OFPFC_DELETE)
dp.send_msg(flow_mod)
def mod_port_behavior(dp, port_config):
port_no = int(port_config.get('port_no', 0))
hw_addr = port_config.get('hw_addr')
config = int(port_config.get('config', 0))
mask = int(port_config.get('mask', 0))
    advertise = int(port_config.get('advertise', 0))
port_mod = dp.ofproto_parser.OFPPortMod(
dp, port_no, hw_addr, config, mask, advertise)
dp.send_msg(port_mod)
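# Illustrative only: a port_config dict for mod_port_behavior() above.
# Values are placeholders; the key names come from the port_config.get()
# calls, and 'advertise' defaults to 0 when omitted.
#
#   example_port_config = {'port_no': 1,
#                          'hw_addr': '0a:e4:1c:d1:3e:44',
#                          'config': 0,
#                          'mask': 0,
#                          'advertise': 0}
#   mod_port_behavior(dp, example_port_config)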
| apache-2.0 |
immesys/RiSyn | tests/pthread_tls/tests/01-run.py | 6 | 1349 | #!/usr/bin/env python3
import os
import sys
def _check_test_output(child):
child.expect('show tls values:')
for i in range(20):
if i != 5:
            child.expect(r'key\[%d\]: \d+, val: %d'
                         % (i, i + 1 if i != 3 else 42))
def testfunc(child):
child.expect('START')
child.expect('-= TEST 1 - create 20 tls with sequencial values 0...19 =-')
_check_test_output(child)
    child.expect('-= TEST 2 - '
                 r'delete deliberate key \(key\[5\]:\d+\) =-')
_check_test_output(child)
child.expect('-= TEST 3 - create new tls =-')
_check_test_output(child)
child.expect('-= TEST 4 - delete all keys =-')
child.expect('show tls values:')
child.expect('-= TEST 5 - try delete non-existing key =-')
child.expect('try to delete returns: 0')
child.expect('-= TEST 6 - add key and delete without a tls =-')
    child.expect(r'created key: \d+')
    child.expect('try to delete returns: 0')
    child.expect('-= TEST 7 - add key without tls =-')
    child.expect(r'created key: \d+')
    child.expect(r'test_7_val: (0|\(nil\))')
child.expect('tls tests finished.')
child.expect('SUCCESS')
if __name__ == "__main__":
sys.path.append(os.path.join(os.environ['RIOTBASE'], 'dist/tools/testrunner'))
from testrunner import run
sys.exit(run(testfunc))
| lgpl-2.1 |
AntaresOne/AntaresCore-Kernel-h815 | Documentation/target/tcm_mod_builder.py | 2358 | 40707 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
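# Example invocation (illustrative; the module name and protocol below are
# placeholders):
#
#   python tcm_mod_builder.py -m tcm_nab5000 -p iSCSI
#
# This generates drivers/target/tcm_nab5000/ with *_base.h, *_fabric.{c,h}
# and *_configfs.c plus Makefile/Kconfig stubs, and can optionally append
# the new module to drivers/target/Makefile and Kconfig.
#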
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var) == True:
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
process_fo = 0;
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
process_fo = 1;
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
ret = pi.write(bufi)
if ret:
tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
tcm_dir = os.getcwd();
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
| gpl-2.0 |
akmetainfo/opencorpora | scripts/ma_pools/unpublish_pools.py | 1 | 1437 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import time
import ConfigParser, MySQLdb
from MySQLdb.cursors import DictCursor
POOL_STATUS_PUBLISHED = 3
POOL_STATUS_UNPUBLISHED = 4
def check_pools(dbh):
dbh.execute("""
SELECT pool_id
FROM morph_annot_pools
WHERE status = {0}
AND pool_id NOT IN (
SELECT DISTINCT pool_id
FROM morph_annot_instances
LEFT JOIN morph_annot_samples USING (sample_id)
LEFT JOIN morph_annot_pools USING (pool_id)
WHERE status = {0}
AND answer = 0
)
""".format(POOL_STATUS_PUBLISHED))
for pool in dbh.fetchall():
set_pool_status(dbh, pool['pool_id'], POOL_STATUS_UNPUBLISHED)
def set_pool_status(dbh, pool_id, status):
dbh.execute("UPDATE morph_annot_pools SET status={0}, updated_ts={2} WHERE pool_id={1} LIMIT 1".format(status, pool_id, int(time.time())))
def main():
config = ConfigParser.ConfigParser()
config.read(sys.argv[1])
hostname = config.get('mysql', 'host')
dbname = config.get('mysql', 'dbname')
username = config.get('mysql', 'user')
password = config.get('mysql', 'passwd')
db = MySQLdb.connect(hostname, username, password, dbname, use_unicode=True, charset="utf8")
dbh = db.cursor(DictCursor)
dbh.execute('START TRANSACTION')
check_pools(dbh)
db.commit()
if __name__ == "__main__":
main()
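# Illustrative usage (the config path and credentials below are assumptions,
# not taken from the repository):
#
#   $ python unpublish_pools.py /path/to/opencorpora.cfg
#
# where the config file provides a [mysql] section, e.g.:
#   [mysql]
#   host = localhost
#   dbname = opencorpora
#   user = annot
#   passwd = secret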
| gpl-2.0 |
jlegendary/servo | tests/wpt/web-platform-tests/tools/html5lib/html5lib/treebuilders/dom.py | 920 | 8469 | from __future__ import absolute_import, division, unicode_literals
from xml.dom import minidom, Node
import weakref
from . import _base
from .. import constants
from ..constants import namespaces
from ..utils import moduleFactoryFactory
def getDomBuilder(DomImplementation):
Dom = DomImplementation
class AttrList(object):
def __init__(self, element):
self.element = element
def __iter__(self):
return list(self.element.attributes.items()).__iter__()
def __setitem__(self, name, value):
self.element.setAttribute(name, value)
def __len__(self):
return len(list(self.element.attributes.items()))
def items(self):
return [(item[0], item[1]) for item in
list(self.element.attributes.items())]
def keys(self):
return list(self.element.attributes.keys())
def __getitem__(self, name):
return self.element.getAttribute(name)
def __contains__(self, name):
if isinstance(name, tuple):
raise NotImplementedError
else:
return self.element.hasAttribute(name)
class NodeBuilder(_base.Node):
def __init__(self, element):
_base.Node.__init__(self, element.nodeName)
self.element = element
namespace = property(lambda self: hasattr(self.element, "namespaceURI")
and self.element.namespaceURI or None)
def appendChild(self, node):
node.parent = self
self.element.appendChild(node.element)
def insertText(self, data, insertBefore=None):
text = self.element.ownerDocument.createTextNode(data)
if insertBefore:
self.element.insertBefore(text, insertBefore.element)
else:
self.element.appendChild(text)
def insertBefore(self, node, refNode):
self.element.insertBefore(node.element, refNode.element)
node.parent = self
def removeChild(self, node):
if node.element.parentNode == self.element:
self.element.removeChild(node.element)
node.parent = None
def reparentChildren(self, newParent):
while self.element.hasChildNodes():
child = self.element.firstChild
self.element.removeChild(child)
newParent.element.appendChild(child)
self.childNodes = []
def getAttributes(self):
return AttrList(self.element)
def setAttributes(self, attributes):
if attributes:
for name, value in list(attributes.items()):
if isinstance(name, tuple):
if name[0] is not None:
qualifiedName = (name[0] + ":" + name[1])
else:
qualifiedName = name[1]
self.element.setAttributeNS(name[2], qualifiedName,
value)
else:
self.element.setAttribute(
name, value)
attributes = property(getAttributes, setAttributes)
def cloneNode(self):
return NodeBuilder(self.element.cloneNode(False))
def hasContent(self):
return self.element.hasChildNodes()
def getNameTuple(self):
if self.namespace is None:
return namespaces["html"], self.name
else:
return self.namespace, self.name
nameTuple = property(getNameTuple)
class TreeBuilder(_base.TreeBuilder):
def documentClass(self):
self.dom = Dom.getDOMImplementation().createDocument(None, None, None)
return weakref.proxy(self)
def insertDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
domimpl = Dom.getDOMImplementation()
doctype = domimpl.createDocumentType(name, publicId, systemId)
self.document.appendChild(NodeBuilder(doctype))
if Dom == minidom:
doctype.ownerDocument = self.dom
def elementClass(self, name, namespace=None):
if namespace is None and self.defaultNamespace is None:
node = self.dom.createElement(name)
else:
node = self.dom.createElementNS(namespace, name)
return NodeBuilder(node)
def commentClass(self, data):
return NodeBuilder(self.dom.createComment(data))
def fragmentClass(self):
return NodeBuilder(self.dom.createDocumentFragment())
def appendChild(self, node):
self.dom.appendChild(node.element)
def testSerializer(self, element):
return testSerializer(element)
def getDocument(self):
return self.dom
def getFragment(self):
return _base.TreeBuilder.getFragment(self).element
def insertText(self, data, parent=None):
if parent != self:
_base.TreeBuilder.insertText(self, data, parent)
else:
# HACK: allow text nodes as children of the document node
if hasattr(self.dom, '_child_node_types'):
if Node.TEXT_NODE not in self.dom._child_node_types:
self.dom._child_node_types = list(self.dom._child_node_types)
self.dom._child_node_types.append(Node.TEXT_NODE)
self.dom.appendChild(self.dom.createTextNode(data))
implementation = DomImplementation
name = None
def testSerializer(element):
element.normalize()
rv = []
def serializeElement(element, indent=0):
if element.nodeType == Node.DOCUMENT_TYPE_NODE:
if element.name:
if element.publicId or element.systemId:
publicId = element.publicId or ""
systemId = element.systemId or ""
rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" %
(' ' * indent, element.name, publicId, systemId))
else:
rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, element.name))
else:
rv.append("|%s<!DOCTYPE >" % (' ' * indent,))
elif element.nodeType == Node.DOCUMENT_NODE:
rv.append("#document")
elif element.nodeType == Node.DOCUMENT_FRAGMENT_NODE:
rv.append("#document-fragment")
elif element.nodeType == Node.COMMENT_NODE:
rv.append("|%s<!-- %s -->" % (' ' * indent, element.nodeValue))
elif element.nodeType == Node.TEXT_NODE:
rv.append("|%s\"%s\"" % (' ' * indent, element.nodeValue))
else:
if (hasattr(element, "namespaceURI") and
element.namespaceURI is not None):
name = "%s %s" % (constants.prefixes[element.namespaceURI],
element.nodeName)
else:
name = element.nodeName
rv.append("|%s<%s>" % (' ' * indent, name))
if element.hasAttributes():
attributes = []
for i in range(len(element.attributes)):
attr = element.attributes.item(i)
name = attr.nodeName
value = attr.value
ns = attr.namespaceURI
if ns:
name = "%s %s" % (constants.prefixes[ns], attr.localName)
else:
name = attr.nodeName
attributes.append((name, value))
for name, value in sorted(attributes):
rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
indent += 2
for child in element.childNodes:
serializeElement(child, indent)
serializeElement(element, 0)
return "\n".join(rv)
return locals()
# The actual means to get a module!
getDomModule = moduleFactoryFactory(getDomBuilder)
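# Illustrative usage of the factory above (a sketch, not part of the
# original file): build a minidom-backed TreeBuilder for html5lib's parser.
#
#   from xml.dom import minidom
#   dom_module = getDomModule(minidom)
#   builder = dom_module.TreeBuilder(namespaceHTMLElements=True)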
| mpl-2.0 |
meganbkratz/acq4 | acq4/pyqtgraph/opengl/items/GLSurfacePlotItem.py | 52 | 5204 | from OpenGL.GL import *
from .GLMeshItem import GLMeshItem
from .. MeshData import MeshData
from ...Qt import QtGui
import numpy as np
__all__ = ['GLSurfacePlotItem']
class GLSurfacePlotItem(GLMeshItem):
"""
**Bases:** :class:`GLMeshItem <pyqtgraph.opengl.GLMeshItem>`
Displays a surface plot on a regular x,y grid
"""
def __init__(self, x=None, y=None, z=None, colors=None, **kwds):
"""
The x, y, z, and colors arguments are passed to setData().
All other keyword arguments are passed to GLMeshItem.__init__().
"""
self._x = None
self._y = None
self._z = None
        self._colors = None
self._vertexes = None
self._meshdata = MeshData()
GLMeshItem.__init__(self, meshdata=self._meshdata, **kwds)
self.setData(x, y, z, colors)
def setData(self, x=None, y=None, z=None, colors=None):
"""
Update the data in this surface plot.
============== =====================================================================
**Arguments:**
x,y 1D arrays of values specifying the x,y positions of vertexes in the
grid. If these are omitted, then the values will be assumed to be
integers.
z 2D array of height values for each grid vertex.
colors (width, height, 4) array of vertex colors.
============== =====================================================================
All arguments are optional.
Note that if vertex positions are updated, the normal vectors for each triangle must
be recomputed. This is somewhat expensive if the surface was initialized with smooth=False
and very expensive if smooth=True. For faster performance, initialize with
computeNormals=False and use per-vertex colors or a normal-independent shader program.
"""
if x is not None:
if self._x is None or len(x) != len(self._x):
self._vertexes = None
self._x = x
if y is not None:
if self._y is None or len(y) != len(self._y):
self._vertexes = None
self._y = y
if z is not None:
#if self._x is None:
#self._x = np.arange(z.shape[0])
#self._vertexes = None
#if self._y is None:
#self._y = np.arange(z.shape[1])
#self._vertexes = None
if self._x is not None and z.shape[0] != len(self._x):
raise Exception('Z values must have shape (len(x), len(y))')
if self._y is not None and z.shape[1] != len(self._y):
raise Exception('Z values must have shape (len(x), len(y))')
self._z = z
if self._vertexes is not None and self._z.shape != self._vertexes.shape[:2]:
self._vertexes = None
if colors is not None:
self._colors = colors
self._meshdata.setVertexColors(colors)
if self._z is None:
return
updateMesh = False
newVertexes = False
## Generate vertex and face array
if self._vertexes is None:
newVertexes = True
self._vertexes = np.empty((self._z.shape[0], self._z.shape[1], 3), dtype=float)
self.generateFaces()
self._meshdata.setFaces(self._faces)
updateMesh = True
## Copy x, y, z data into vertex array
if newVertexes or x is not None:
if x is None:
if self._x is None:
x = np.arange(self._z.shape[0])
else:
x = self._x
self._vertexes[:, :, 0] = x.reshape(len(x), 1)
updateMesh = True
if newVertexes or y is not None:
if y is None:
if self._y is None:
y = np.arange(self._z.shape[1])
else:
y = self._y
self._vertexes[:, :, 1] = y.reshape(1, len(y))
updateMesh = True
if newVertexes or z is not None:
self._vertexes[...,2] = self._z
updateMesh = True
## Update MeshData
if updateMesh:
self._meshdata.setVertexes(self._vertexes.reshape(self._vertexes.shape[0]*self._vertexes.shape[1], 3))
self.meshDataChanged()
def generateFaces(self):
cols = self._z.shape[1]-1
rows = self._z.shape[0]-1
faces = np.empty((cols*rows*2, 3), dtype=np.uint)
rowtemplate1 = np.arange(cols).reshape(cols, 1) + np.array([[0, 1, cols+1]])
rowtemplate2 = np.arange(cols).reshape(cols, 1) + np.array([[cols+1, 1, cols+2]])
for row in range(rows):
start = row * cols * 2
faces[start:start+cols] = rowtemplate1 + row * (cols+1)
faces[start+cols:start+(cols*2)] = rowtemplate2 + row * (cols+1)
self._faces = faces
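# Illustrative usage (a sketch; assumes a running Qt application and the
# GLViewWidget from this package):
#
#   import numpy as np
#   import pyqtgraph.opengl as gl
#   x = np.linspace(-8, 8, 50)
#   y = np.linspace(-8, 8, 50)
#   z = np.cos(np.sqrt(x[:, None] ** 2 + y[None, :] ** 2))
#   surf = GLSurfacePlotItem(x=x, y=y, z=z, shader='shaded')
#   view = gl.GLViewWidget()
#   view.addItem(surf)
#   view.show()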
| mit |
junhuac/MQUIC | src/tools/cr/cr/actions/linux.py | 16 | 1318 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to hold linux specific action implementations."""
import cr
class LinuxRunner(cr.Runner):
"""An implementation of cr.Runner for the linux platform.
This supports directly executing the binaries from the output directory.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Kill(self, targets, arguments):
# Not needed on Linux because the target generally runs in the same shell
# and can be killed using Ctrl-C.
pass
def Run(self, target, arguments):
with target:
cr.Host.Execute('{CR_BINARY}', '{CR_RUN_ARGUMENTS}', *arguments)
def Test(self, target, arguments):
self.Run(target, arguments)
class LinuxInstaller(cr.Installer):
"""An implementation of cr.Installer for the linux platform.
This does nothing, the linux runner works from the output directory, there
is no need to install anywhere.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Uninstall(self, targets, arguments):
pass
def Install(self, targets, arguments):
pass
def Reinstall(self, targets, arguments):
pass
| mit |
Geew/shoppingDemo | app.py | 1 | 1437 | #!/usr/bin/env python
# coding: utf8
import os.path
import sys
sys.path.append(os.path.dirname(__file__))
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
from config import configs
class Application(tornado.web.Application):
def __init__(self):
handlers = []
handler_mods = [
'manager',
]
for i in handler_mods:
m = __import__('handler.' + i, fromlist=['url_spec'])
handlers.extend(m.url_spec())
handler_mods = [
'index',
]
for i in handler_mods:
m = __import__(i, fromlist=['url_spec'])
handlers.extend(m.url_spec())
settings = dict(
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
cookie_secret=configs['cookie_secret'],
autoescape=None, # disable escaping
debug=configs['debug'],
gzip=True,
#xsrf_cookies=True, # csrf enabled
)
tornado.web.Application.__init__(self, handlers, **settings)
def main():
http_server = tornado.httpserver.HTTPServer(Application(), xheaders=True)
http_server.listen(configs['port'])
try:
tornado.ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main() | gpl-2.0 |
dylanGeng/BuildingMachineLearningSystemsWithPython | ch02/figure2.py | 22 | 1732 | # This code is supporting material for the book
# Building Machine Learning Systems with Python
# by Willi Richert and Luis Pedro Coelho
# published by PACKT Publishing
#
# It is made available under the MIT License
COLOUR_FIGURE = False
from matplotlib import pyplot as plt
from sklearn.datasets import load_iris
data = load_iris()
features = data.data
feature_names = data.feature_names
target = data.target
target_names = data.target_names
# We use NumPy fancy indexing to get an array of strings:
labels = target_names[target]
is_setosa = (labels == 'setosa')
features = features[~is_setosa]
labels = labels[~is_setosa]
is_virginica = (labels == 'virginica')
# Hand fixed thresholds:
t = 1.65
t2 = 1.75
# Features to use: 3 & 2
f0, f1 = 3, 2
if COLOUR_FIGURE:
area1c = (1., .8, .8)
area2c = (.8, .8, 1.)
else:
area1c = (1., 1, 1)
area2c = (.7, .7, .7)
# Plot from 90% of smallest value to 110% of largest value
# (all feature values are positive, otherwise this would not work very well)
x0 = features[:, f0].min() * .9
x1 = features[:, f0].max() * 1.1
y0 = features[:, f1].min() * .9
y1 = features[:, f1].max() * 1.1
fig,ax = plt.subplots()
ax.fill_between([t, x1], [y0, y0], [y1, y1], color=area2c)
ax.fill_between([x0, t], [y0, y0], [y1, y1], color=area1c)
ax.plot([t, t], [y0, y1], 'k--', lw=2)
ax.plot([t2, t2], [y0, y1], 'k:', lw=2)
ax.scatter(features[is_virginica, f0],
features[is_virginica, f1], c='b', marker='o', s=40)
ax.scatter(features[~is_virginica, f0],
features[~is_virginica, f1], c='r', marker='x', s=40)
ax.set_ylim(y0, y1)
ax.set_xlim(x0, x1)
ax.set_xlabel(feature_names[f0])
ax.set_ylabel(feature_names[f1])
fig.tight_layout()
fig.savefig('figure2.png')
| mit |
Jonas-Drotleff/YoWhenReady | requests/packages/chardet/langhebrewmodel.py | 2763 | 11318 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Simon Montagu
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Shoshannah Forbes - original C code (?)
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Windows-1255 language model
# Character Mapping Table:
win1255_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40
78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50
253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60
66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70
124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23,
12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253,
)
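# A reading sketch of the table above: the byte for 'a' (0x61) maps to
# order 50, while the Hebrew letters in the 0xE0-0xFA range map to the
# smallest orders (1-27), i.e. the characters the sequence model below
# treats as the most frequent in Hebrew text.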
# Model Table:
# total sequences: 100%
# first 512 sequences: 98.4004%
# first 1024 sequences: 1.5981%
# rest sequences: 0.087%
# negative sequences: 0.0015%
HebrewLangModel = (
0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
)
Win1255HebrewModel = {
'charToOrderMap': win1255_CharToOrderMap,
'precedenceMatrix': HebrewLangModel,
'mTypicalPositiveRatio': 0.984004,
'keepEnglishLetter': False,
'charsetName': "windows-1255"
}
# flake8: noqa
| apache-2.0 |
amisrs/one-eighty | angular_flask/lib/python2.7/site-packages/sqlalchemy/orm/identity.py | 12 | 6992 | # orm/identity.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import weakref
from . import attributes
class IdentityMap(dict):
def __init__(self):
self._modified = set()
self._wr = weakref.ref(self)
def replace(self, state):
raise NotImplementedError()
def add(self, state):
raise NotImplementedError()
def update(self, dict):
raise NotImplementedError("IdentityMap uses add() to insert data")
def clear(self):
raise NotImplementedError("IdentityMap uses remove() to remove data")
def _manage_incoming_state(self, state):
state._instance_dict = self._wr
if state.modified:
self._modified.add(state)
def _manage_removed_state(self, state):
del state._instance_dict
self._modified.discard(state)
def _dirty_states(self):
return self._modified
def check_modified(self):
"""return True if any InstanceStates present have been marked
as 'modified'.
"""
return bool(self._modified)
def has_key(self, key):
return key in self
def popitem(self):
raise NotImplementedError("IdentityMap uses remove() to remove data")
def pop(self, key, *args):
raise NotImplementedError("IdentityMap uses remove() to remove data")
def setdefault(self, key, default=None):
raise NotImplementedError("IdentityMap uses add() to insert data")
def copy(self):
raise NotImplementedError()
def __setitem__(self, key, value):
raise NotImplementedError("IdentityMap uses add() to insert data")
def __delitem__(self, key):
raise NotImplementedError("IdentityMap uses remove() to remove data")
class WeakInstanceDict(IdentityMap):
def __init__(self):
IdentityMap.__init__(self)
def __getitem__(self, key):
state = dict.__getitem__(self, key)
o = state.obj()
if o is None:
raise KeyError, key
return o
def __contains__(self, key):
try:
if dict.__contains__(self, key):
state = dict.__getitem__(self, key)
o = state.obj()
else:
return False
except KeyError:
return False
else:
return o is not None
def contains_state(self, state):
return dict.get(self, state.key) is state
def replace(self, state):
if dict.__contains__(self, state.key):
existing = dict.__getitem__(self, state.key)
if existing is not state:
self._manage_removed_state(existing)
else:
return
dict.__setitem__(self, state.key, state)
self._manage_incoming_state(state)
def add(self, state):
key = state.key
# inline of self.__contains__
if dict.__contains__(self, key):
try:
existing_state = dict.__getitem__(self, key)
if existing_state is not state:
o = existing_state.obj()
if o is not None:
raise AssertionError(
"A conflicting state is already "
"present in the identity map for key %r"
% (key, ))
else:
return
except KeyError:
pass
dict.__setitem__(self, key, state)
self._manage_incoming_state(state)
def get(self, key, default=None):
state = dict.get(self, key, default)
if state is default:
return default
o = state.obj()
if o is None:
return default
return o
def _items(self):
values = self.all_states()
result = []
for state in values:
value = state.obj()
if value is not None:
result.append((state.key, value))
return result
def _values(self):
values = self.all_states()
result = []
for state in values:
value = state.obj()
if value is not None:
result.append(value)
return result
# Py3K
#def items(self):
# return iter(self._items())
#
#def values(self):
# return iter(self._values())
# Py2K
items = _items
def iteritems(self):
return iter(self.items())
values = _values
def itervalues(self):
return iter(self.values())
# end Py2K
def all_states(self):
# Py3K
# return list(dict.values(self))
# Py2K
return dict.values(self)
# end Py2K
def discard(self, state):
st = dict.get(self, state.key, None)
if st is state:
dict.pop(self, state.key, None)
self._manage_removed_state(state)
def prune(self):
return 0
class StrongInstanceDict(IdentityMap):
def all_states(self):
return [attributes.instance_state(o) for o in self.itervalues()]
def contains_state(self, state):
return (
state.key in self and
attributes.instance_state(self[state.key]) is state)
def replace(self, state):
if dict.__contains__(self, state.key):
existing = dict.__getitem__(self, state.key)
existing = attributes.instance_state(existing)
if existing is not state:
self._manage_removed_state(existing)
else:
return
dict.__setitem__(self, state.key, state.obj())
self._manage_incoming_state(state)
def add(self, state):
if state.key in self:
if attributes.instance_state(dict.__getitem__(self,
state.key)) is not state:
raise AssertionError('A conflicting state is already '
'present in the identity map for key %r'
% (state.key, ))
else:
dict.__setitem__(self, state.key, state.obj())
self._manage_incoming_state(state)
def discard(self, state):
obj = dict.get(self, state.key, None)
if obj is not None:
st = attributes.instance_state(obj)
if st is state:
dict.pop(self, state.key, None)
self._manage_removed_state(state)
def prune(self):
"""prune unreferenced, non-dirty states."""
ref_count = len(self)
dirty = [s.obj() for s in self.all_states() if s.modified]
# work around http://bugs.python.org/issue6149
keepers = weakref.WeakValueDictionary()
keepers.update(self)
dict.clear(self)
dict.update(self, keepers)
self.modified = bool(dirty)
return ref_count - len(self)
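# Usage sketch (the InstanceState objects are hypothetical; `state.key`
# must already be populated):
#
#   idmap = WeakInstanceDict()
#   idmap.add(state)         # indexes the state weakly under state.key
#   obj = idmap[state.key]   # returns the mapped object, or raises
#                            # KeyError once it was garbage-collected
#
# StrongInstanceDict behaves the same but holds hard references, so
# prune() must be called to drop unreferenced, non-dirty entries.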
| mit |
MenZil/kuma | vendor/packages/translate/lang/fa.py | 26 | 2366 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2007, 2010 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""This module represents the Persian language.
.. seealso:: http://en.wikipedia.org/wiki/Persian_language
"""
import re
from translate.lang import common
def guillemets(text):
def convertquotation(match):
prefix = match.group(1)
# Let's see that we didn't perhaps match an XML tag property like
# <a href="something">
if prefix == u"=":
return match.group(0)
return u"%s«%s»" % (prefix, match.group(2))
# Check that there is an even number of double quotes, otherwise it is
# probably not safe to convert them.
if text.count(u'"') % 2 == 0:
text = re.sub('(.|^)"([^"]+)"', convertquotation, text)
singlecount = text.count(u"'")
if singlecount:
if singlecount == text.count(u'`'):
text = re.sub("(.|^)`([^']+)'", convertquotation, text)
elif singlecount % 2 == 0:
text = re.sub("(.|^)'([^']+)'", convertquotation, text)
text = re.sub(u'(.|^)“([^”]+)”', convertquotation, text)
return text
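# A usage sketch of the conversion above:
#
#   >>> guillemets(u'He said "hello" to me')
#   u'He said \xabhello\xbb to me'
#
# Unbalanced double quotes are left untouched, because the evenness check
# above fails for them.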
class fa(common.Common):
"""This class represents Persian."""
listseperator = u"، "
puncdict = {
u",": u"،",
u";": u"؛",
u"?": u"؟",
#This causes problems with variables, so commented out for now:
#u"%": u"٪",
}
ignoretests = ["startcaps", "simplecaps"]
#TODO: check persian numerics
#TODO: zwj and zwnj?
@classmethod
def punctranslate(cls, text):
"""Implement "French" quotation marks."""
text = super(cls, cls).punctranslate(text)
return guillemets(text)
| mpl-2.0 |
ktdreyer/teuthology | teuthology/task/mpi.py | 10 | 4558 | """
Start mpi processes (and allow commands to be run inside process)
"""
from StringIO import StringIO
import logging
import re
from teuthology import misc as teuthology
log = logging.getLogger(__name__)
def _check_mpi_version(remotes):
"""
Retrieve the MPI version from each of `remotes` and raise an exception
if they are not all the same version.
"""
versions = set()
for remote in remotes:
version_str = remote.run(args=["mpiexec", "--version"], stdout=StringIO()).stdout.getvalue()
try:
version = re.search("^\s+Version:\s+(.+)$", version_str, re.MULTILINE).group(1)
except AttributeError:
raise RuntimeError("Malformed MPI version output: {0}".format(version_str))
else:
versions.add(version)
if len(versions) != 1:
raise RuntimeError("MPI version mismatch. Versions are: {0}".format(", ".join(versions)))
else:
log.info("MPI version {0}".format(list(versions)[0]))
def task(ctx, config):
"""
Setup MPI and execute commands
Example that starts an MPI process on specific clients::
tasks:
- ceph:
- ceph-fuse: [client.0, client.1]
- ssh_keys:
- mpi:
nodes: [client.0, client.1]
exec: ior ...
Example that starts MPI processes on all clients::
tasks:
- ceph:
- ceph-fuse:
- ssh_keys:
- mpi:
exec: ior ...
Example that starts MPI processes on all roles::
tasks:
- ceph:
- ssh_keys:
- mpi:
nodes: all
exec: ...
Example that specifies a working directory for MPI processes:
tasks:
- ceph:
- ceph-fuse:
- pexec:
clients:
- ln -s {testdir}/mnt.* {testdir}/gmnt
- ssh_keys:
- mpi:
exec: fsx-mpi
workdir: {testdir}/gmnt
- pexec:
clients:
- rm -f {testdir}/gmnt
:param ctx: Context
:param config: Configuration
"""
assert isinstance(config, dict), 'task mpi got invalid config'
assert 'exec' in config, 'task mpi got invalid config, missing exec'
testdir = teuthology.get_testdir(ctx)
mpiexec = config['exec'].replace('$TESTDIR', testdir)
hosts = []
remotes = []
master_remote = None
if 'nodes' in config:
if isinstance(config['nodes'], basestring) and config['nodes'] == 'all':
for role in teuthology.all_roles(ctx.cluster):
(remote,) = ctx.cluster.only(role).remotes.iterkeys()
ip,port = remote.ssh.get_transport().getpeername()
hosts.append(ip)
remotes.append(remote)
            # config['nodes'] is the string 'all' here, so indexing it would
            # select the letter 'a'; use the first remote gathered above
            master_remote = remotes[0]
elif isinstance(config['nodes'], list):
for role in config['nodes']:
(remote,) = ctx.cluster.only(role).remotes.iterkeys()
ip,port = remote.ssh.get_transport().getpeername()
hosts.append(ip)
remotes.append(remote)
(master_remote,) = ctx.cluster.only(config['nodes'][0]).remotes.iterkeys()
else:
roles = ['client.{id}'.format(id=id_) for id_ in teuthology.all_roles_of_type(ctx.cluster, 'client')]
(master_remote,) = ctx.cluster.only(roles[0]).remotes.iterkeys()
for role in roles:
(remote,) = ctx.cluster.only(role).remotes.iterkeys()
ip,port = remote.ssh.get_transport().getpeername()
hosts.append(ip)
remotes.append(remote)
# mpich is sensitive to different versions on different nodes
_check_mpi_version(remotes)
workdir = []
if 'workdir' in config:
workdir = ['-wdir', config['workdir'].replace('$TESTDIR', testdir) ]
log.info('mpi rank 0 is: {name}'.format(name=master_remote.name))
# write out the mpi hosts file
log.info('mpi nodes: [%s]' % (', '.join(hosts)))
teuthology.write_file(remote=master_remote,
path='{tdir}/mpi-hosts'.format(tdir=testdir),
data='\n'.join(hosts))
log.info('mpiexec on {name}: {cmd}'.format(name=master_remote.name, cmd=mpiexec))
args=['mpiexec', '-f', '{tdir}/mpi-hosts'.format(tdir=testdir)]
args.extend(workdir)
args.extend(mpiexec.split(' '))
master_remote.run(args=args, )
log.info('mpi task completed')
master_remote.run(args=['rm', '{tdir}/mpi-hosts'.format(tdir=testdir)])
| mit |
damdam-s/OCB | openerp/tools/graph.py | 441 | 26118 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import operator
import math
class graph(object):
def __init__(self, nodes, transitions, no_ancester=None):
"""Initialize graph's object
@param nodes list of ids of nodes in the graph
@param transitions list of edges in the graph in the form (source_node, destination_node)
@param no_ancester list of nodes with no incoming edges
"""
self.nodes = nodes or []
self.edges = transitions or []
self.no_ancester = no_ancester or {}
trans = {}
for t in transitions:
trans.setdefault(t[0], [])
trans[t[0]].append(t[1])
self.transitions = trans
self.result = {}
def init_rank(self):
"""Computes rank of the nodes of the graph by finding initial feasible tree
"""
self.edge_wt = {}
for link in self.links:
self.edge_wt[link] = self.result[link[1]]['x'] - self.result[link[0]]['x']
tot_node = len(self.partial_order)
#do until all the nodes in the component are searched
while self.tight_tree()<tot_node:
list_node = []
list_edge = []
for node in self.nodes:
if node not in self.reachable_nodes:
list_node.append(node)
for edge in self.edge_wt:
if edge not in self.tree_edges:
list_edge.append(edge)
slack = 100
for edge in list_edge:
if ((edge[0] in self.reachable_nodes and edge[1] not in self.reachable_nodes) or
(edge[1] in self.reachable_nodes and edge[0] not in self.reachable_nodes)):
if slack > self.edge_wt[edge]-1:
slack = self.edge_wt[edge]-1
new_edge = edge
if new_edge[0] not in self.reachable_nodes:
delta = -(self.edge_wt[new_edge]-1)
else:
delta = self.edge_wt[new_edge]-1
for node in self.result:
if node in self.reachable_nodes:
self.result[node]['x'] += delta
for edge in self.edge_wt:
self.edge_wt[edge] = self.result[edge[1]]['x'] - self.result[edge[0]]['x']
self.init_cutvalues()
def tight_tree(self):
self.reachable_nodes = []
self.tree_edges = []
self.reachable_node(self.start)
return len(self.reachable_nodes)
def reachable_node(self, node):
"""Find the nodes of the graph which are only 1 rank apart from each other
"""
if node not in self.reachable_nodes:
self.reachable_nodes.append(node)
for edge in self.edge_wt:
if edge[0]==node:
if self.edge_wt[edge]==1:
self.tree_edges.append(edge)
if edge[1] not in self.reachable_nodes:
self.reachable_nodes.append(edge[1])
self.reachable_node(edge[1])
def init_cutvalues(self):
"""Initailize cut values of edges of the feasible tree.
Edges with negative cut-values are removed from the tree to optimize rank assignment
"""
self.cut_edges = {}
self.head_nodes = []
i=0
for edge in self.tree_edges:
self.head_nodes = []
rest_edges = []
rest_edges += self.tree_edges
del rest_edges[i]
self.head_component(self.start, rest_edges)
i+=1
positive = 0
negative = 0
for source_node in self.transitions:
if source_node in self.head_nodes:
for dest_node in self.transitions[source_node]:
if dest_node not in self.head_nodes:
negative+=1
else:
for dest_node in self.transitions[source_node]:
if dest_node in self.head_nodes:
positive+=1
self.cut_edges[edge] = positive - negative
def head_component(self, node, rest_edges):
"""Find nodes which are reachable from the starting node, after removing an edge
"""
if node not in self.head_nodes:
self.head_nodes.append(node)
for edge in rest_edges:
if edge[0]==node:
self.head_component(edge[1],rest_edges)
def process_ranking(self, node, level=0):
"""Computes initial feasible ranking after making graph acyclic with depth-first search
"""
if node not in self.result:
self.result[node] = {'y': None, 'x':level, 'mark':0}
else:
if level > self.result[node]['x']:
self.result[node]['x'] = level
if self.result[node]['mark']==0:
self.result[node]['mark'] = 1
for sec_end in self.transitions.get(node, []):
self.process_ranking(sec_end, level+1)
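    # Ranking sketch: for edges A->B, A->C, B->C a DFS from A assigns
    # x-ranks A:0, B:1, C:2 -- C keeps the deeper rank because the
    # `level > x` test above only ever pushes nodes further down.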
def make_acyclic(self, parent, node, level, tree):
"""Computes Partial-order of the nodes with depth-first search
"""
if node not in self.partial_order:
self.partial_order[node] = {'level':level, 'mark':0}
if parent:
tree.append((parent, node))
if self.partial_order[node]['mark']==0:
self.partial_order[node]['mark'] = 1
for sec_end in self.transitions.get(node, []):
self.links.append((node, sec_end))
self.make_acyclic(node, sec_end, level+1, tree)
return tree
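    # Sketch: in the demo graph at the bottom of this module,
    # make_acyclic(None, 'profile', 0, []) records a DFS spanning tree and
    # a partial order ('profile': 0, 'mrp': 1, 'project': 2, ...) which
    # rev_edges() below uses to detect and flip back-edges.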
def rev_edges(self, tree):
"""reverse the direction of the edges whose source-node-partail_order> destination-node-partail_order
to make the graph acyclic
"""
Is_Cyclic = False
i=0
for link in self.links:
src = link[0]
des = link[1]
edge_len = self.partial_order[des]['level'] - self.partial_order[src]['level']
if edge_len < 0:
del self.links[i]
self.links.insert(i, (des, src))
self.transitions[src].remove(des)
self.transitions.setdefault(des, []).append(src)
Is_Cyclic = True
elif math.fabs(edge_len) > 1:
Is_Cyclic = True
i += 1
return Is_Cyclic
def exchange(self, e, f):
"""Exchange edges to make feasible-tree optimized
:param e: edge with negative cut-value
:param f: new edge with minimum slack-value
"""
del self.tree_edges[self.tree_edges.index(e)]
self.tree_edges.append(f)
self.init_cutvalues()
def enter_edge(self, edge):
"""Finds a new_edge with minimum slack value to replace an edge with negative cut-value
@param edge edge with negative cut-value
"""
self.head_nodes = []
rest_edges = []
rest_edges += self.tree_edges
del rest_edges[rest_edges.index(edge)]
self.head_component(self.start, rest_edges)
if edge[1] in self.head_nodes:
l = []
for node in self.result:
if node not in self.head_nodes:
l.append(node)
self.head_nodes = l
slack = 100
new_edge = edge
for source_node in self.transitions:
if source_node in self.head_nodes:
for dest_node in self.transitions[source_node]:
if dest_node not in self.head_nodes:
if slack>(self.edge_wt[edge]-1):
slack = self.edge_wt[edge]-1
new_edge = (source_node, dest_node)
return new_edge
def leave_edge(self):
"""Returns the edge with negative cut_value(if exists)
"""
if self.critical_edges:
for edge in self.critical_edges:
self.cut_edges[edge] = 0
for edge in self.cut_edges:
if self.cut_edges[edge]<0:
return edge
return None
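    # Network-simplex sketch (see rank() below): while leave_edge()
    # returns a tree edge with a negative cut value, enter_edge() finds
    # the replacement non-tree edge with minimum slack and exchange()
    # swaps them, re-deriving the cut values.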
def finalize_rank(self, node, level):
self.result[node]['x'] = level
for destination in self.optimal_edges.get(node, []):
self.finalize_rank(destination, level+1)
def normalize(self):
"""The ranks are normalized by setting the least rank to zero.
"""
least_rank = min(map(lambda x: x['x'], self.result.values()))
if least_rank!=0:
for node in self.result:
self.result[node]['x']-=least_rank
def make_chain(self):
"""Edges between nodes more than one rank apart are replaced by chains of unit
length edges between temporary nodes.
"""
for edge in self.edge_wt:
if self.edge_wt[edge]>1:
self.transitions[edge[0]].remove(edge[1])
start = self.result[edge[0]]['x']
end = self.result[edge[1]]['x']
for rank in range(start+1, end):
if not self.result.get((rank, 'temp'), False):
self.result[(rank, 'temp')] = {'y': None, 'x': rank, 'mark': 0}
for rank in range(start, end):
if start==rank:
self.transitions[edge[0]].append((rank+1, 'temp'))
elif rank==end-1:
self.transitions.setdefault((rank, 'temp'), []).append(edge[1])
else:
self.transitions.setdefault((rank, 'temp'), []).append((rank+1, 'temp'))
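    # Chain sketch: an edge A->B spanning ranks 0 and 3 becomes
    # A -> (1, 'temp') -> (2, 'temp') -> B, so after this pass every edge
    # used for ordering spans exactly one rank.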
def init_order(self, node, level):
"""Initialize orders the nodes in each rank with depth-first search
"""
if not self.result[node]['y']:
self.result[node]['y'] = self.order[level]
self.order[level] += 1
for sec_end in self.transitions.get(node, []):
if node!=sec_end:
self.init_order(sec_end, self.result[sec_end]['x'])
def order_heuristic(self):
for i in range(12):
self.wmedian()
def wmedian(self):
"""Applies median heuristic to find optimzed order of the nodes with in their ranks
"""
for level in self.levels:
node_median = []
nodes = self.levels[level]
for node in nodes:
node_median.append((node, self.median_value(node, level-1)))
sort_list = sorted(node_median, key=operator.itemgetter(1))
new_list = [tuple[0] for tuple in sort_list]
self.levels[level] = new_list
order = 0
for node in nodes:
self.result[node]['y'] = order
order +=1
def median_value(self, node, adj_rank):
"""Returns median value of a vertex , defined as the median position of the adjacent vertices
@param node node to process
@param adj_rank rank 1 less than the node's rank
"""
adj_nodes = self.adj_position(node, adj_rank)
l = len(adj_nodes)
m = l/2
if l==0:
return -1.0
elif l%2 == 1:
            return adj_nodes[m]  # odd count: the middle element is the median
elif l==2:
return (adj_nodes[0]+adj_nodes[1])/2
else:
left = adj_nodes[m-1] - adj_nodes[0]
right = adj_nodes[l-1] - adj_nodes[m]
return ((adj_nodes[m-1]*right) + (adj_nodes[m]*left))/(left+right)
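    # Worked sketch: for adjacent positions [2, 4, 5, 7], left = 4 - 2 = 2
    # and right = 7 - 5 = 2, so the weighted median is
    # (4*2 + 5*2) / (2 + 2) = 4 (Python 2 integer division).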
def adj_position(self, node, adj_rank):
"""Returns list of the present positions of the nodes adjacent to node in the given adjacent rank.
@param node node to process
@param adj_rank rank 1 less than the node's rank
"""
pre_level_nodes = self.levels.get(adj_rank, [])
adj_nodes = []
if pre_level_nodes:
for src in pre_level_nodes:
if self.transitions.get(src) and node in self.transitions[src]:
adj_nodes.append(self.result[src]['y'])
return adj_nodes
def preprocess_order(self):
levels = {}
for r in self.partial_order:
l = self.result[r]['x']
levels.setdefault(l,[])
levels[l].append(r)
self.levels = levels
def graph_order(self):
"""Finds actual-order of the nodes with respect to maximum number of nodes in a rank in component
"""
mid_pos = 0.0
max_level = max(map(lambda x: len(x), self.levels.values()))
for level in self.levels:
if level:
no = len(self.levels[level])
factor = (max_level - no) * 0.10
list = self.levels[level]
list.reverse()
if no%2==0:
first_half = list[no/2:]
factor = -factor
else:
first_half = list[no/2+1:]
                if max_level==1:  # special case: a purely horizontal graph (one node per rank)
self.result[list[no/2]]['y'] = mid_pos + (self.result[list[no/2]]['x']%2 * 0.5)
else:
self.result[list[no/2]]['y'] = mid_pos + factor
last_half = list[:no/2]
i=1
for node in first_half:
self.result[node]['y'] = mid_pos - (i + factor)
i += 1
i=1
for node in last_half:
self.result[node]['y'] = mid_pos + (i + factor)
i += 1
else:
self.max_order += max_level+1
mid_pos = self.result[self.start]['y']
def tree_order(self, node, last=0):
mid_pos = self.result[node]['y']
l = self.transitions.get(node, [])
l.reverse()
no = len(l)
rest = no%2
first_half = l[no/2+rest:]
last_half = l[:no/2]
for i, child in enumerate(first_half):
self.result[child]['y'] = mid_pos - (i+1 - (0 if rest else 0.5))
if self.transitions.get(child, False):
if last:
self.result[child]['y'] = last + len(self.transitions[child])/2 + 1
last = self.tree_order(child, last)
if rest:
mid_node = l[no/2]
self.result[mid_node]['y'] = mid_pos
if self.transitions.get(mid_node, False):
if last:
self.result[mid_node]['y'] = last + len(self.transitions[mid_node])/2 + 1
if node!=mid_node:
last = self.tree_order(mid_node)
else:
if last:
self.result[mid_node]['y'] = last + 1
self.result[node]['y'] = self.result[mid_node]['y']
mid_pos = self.result[node]['y']
i=1
last_child = None
for child in last_half:
self.result[child]['y'] = mid_pos + (i - (0 if rest else 0.5))
last_child = child
i += 1
if self.transitions.get(child, False):
if last:
self.result[child]['y'] = last + len(self.transitions[child])/2 + 1
if node!=child:
last = self.tree_order(child, last)
if last_child:
last = self.result[last_child]['y']
return last
def process_order(self):
"""Finds actual-order of the nodes with respect to maximum number of nodes in a rank in component
"""
if self.Is_Cyclic:
max_level = max(map(lambda x: len(x), self.levels.values()))
if max_level%2:
self.result[self.start]['y'] = (max_level+1)/2 + self.max_order + (self.max_order and 1)
else:
self.result[self.start]['y'] = max_level /2 + self.max_order + (self.max_order and 1)
self.graph_order()
else:
self.result[self.start]['y'] = 0
self.tree_order(self.start, 0)
min_order = math.fabs(min(map(lambda x: x['y'], self.result.values())))
index = self.start_nodes.index(self.start)
same = False
roots = []
if index>0:
for start in self.start_nodes[:index]:
same = True
for edge in self.tree_list[start][1:]:
if edge in self.tree_list[self.start]:
continue
else:
same = False
break
if same:
roots.append(start)
if roots:
min_order += self.max_order
else:
min_order += self.max_order + 1
for level in self.levels:
for node in self.levels[level]:
self.result[node]['y'] += min_order
if roots:
roots.append(self.start)
one_level_el = self.tree_list[self.start][0][1]
base = self.result[one_level_el]['y']# * 2 / (index + 2)
no = len(roots)
first_half = roots[:no/2]
if no%2==0:
last_half = roots[no/2:]
else:
last_half = roots[no/2+1:]
factor = -math.floor(no/2)
for start in first_half:
self.result[start]['y'] = base + factor
factor += 1
if no%2:
self.result[roots[no/2]]['y'] = base + factor
factor +=1
for start in last_half:
self.result[start]['y'] = base + factor
factor += 1
self.max_order = max(map(lambda x: x['y'], self.result.values()))
def find_starts(self):
"""Finds other start nodes of the graph in the case when graph is disconneted
"""
rem_nodes = []
for node in self.nodes:
if not self.partial_order.get(node):
rem_nodes.append(node)
cnt = 0
while True:
if len(rem_nodes)==1:
self.start_nodes.append(rem_nodes[0])
break
else:
count = 0
new_start = rem_nodes[0]
largest_tree = []
for node in rem_nodes:
self.partial_order = {}
tree = self.make_acyclic(None, node, 0, [])
if len(tree)+1 > count:
count = len(tree) + 1
new_start = node
largest_tree = tree
else:
if not largest_tree:
new_start = rem_nodes[0]
rem_nodes.remove(new_start)
self.start_nodes.append(new_start)
for edge in largest_tree:
if edge[0] in rem_nodes:
rem_nodes.remove(edge[0])
if edge[1] in rem_nodes:
rem_nodes.remove(edge[1])
if not rem_nodes:
break
def rank(self):
"""Finds the optimized rank of the nodes using Network-simplex algorithm
"""
self.levels = {}
self.critical_edges = []
self.partial_order = {}
self.links = []
self.Is_Cyclic = False
self.tree_list[self.start] = self.make_acyclic(None, self.start, 0, [])
self.Is_Cyclic = self.rev_edges(self.tree_list[self.start])
self.process_ranking(self.start)
self.init_rank()
        # swap out tree edges with negative cut values to optimize the feasible tree
e = self.leave_edge()
while e :
f = self.enter_edge(e)
if e==f:
self.critical_edges.append(e)
else:
self.exchange(e,f)
e = self.leave_edge()
#finalize rank using optimum feasible tree
# self.optimal_edges = {}
# for edge in self.tree_edges:
# source = self.optimal_edges.setdefault(edge[0], [])
# source.append(edge[1])
# self.finalize_rank(self.start, 0)
#normalization
self.normalize()
for edge in self.edge_wt:
self.edge_wt[edge] = self.result[edge[1]]['x'] - self.result[edge[0]]['x']
def order_in_rank(self):
"""Finds optimized order of the nodes within their ranks using median heuristic
"""
self.make_chain()
self.preprocess_order()
self.order = {}
max_rank = max(map(lambda x: x, self.levels.keys()))
for i in range(max_rank+1):
self.order[i] = 0
self.init_order(self.start, self.result[self.start]['x'])
for level in self.levels:
self.levels[level].sort(lambda x, y: cmp(self.result[x]['y'], self.result[y]['y']))
self.order_heuristic()
self.process_order()
def process(self, starting_node):
"""Process the graph to find ranks and order of the nodes
@param starting_node node from where to start the graph search
"""
self.start_nodes = starting_node or []
self.partial_order = {}
self.links = []
self.tree_list = {}
if self.nodes:
if self.start_nodes:
                # add dummy edges to the nodes which do not have any incoming edges
tree = self.make_acyclic(None, self.start_nodes[0], 0, [])
for node in self.no_ancester:
for sec_node in self.transitions.get(node, []):
if sec_node in self.partial_order.keys():
self.transitions[self.start_nodes[0]].append(node)
break
self.partial_order = {}
tree = self.make_acyclic(None, self.start_nodes[0], 0, [])
            # if the graph is disconnected or no start node is given,
            # then find a starting node for each component of the graph
if len(self.nodes) > len(self.partial_order):
self.find_starts()
self.max_order = 0
#for each component of the graph find ranks and order of the nodes
for s in self.start_nodes:
self.start = s
                self.rank()  # first step: network simplex algorithm
self.order_in_rank() #Second step: ordering nodes within ranks
def __str__(self):
result = ''
for l in self.levels:
result += 'PosY: ' + str(l) + '\n'
for node in self.levels[l]:
result += '\tPosX: '+ str(self.result[node]['y']) + ' - Node:' + str(node) + "\n"
return result
def scale(self, maxx, maxy, nwidth=0, nheight=0, margin=20):
"""Computes actual co-ordiantes of the nodes
"""
#for flat edges ie. source an destination nodes are on the same rank
for src in self.transitions:
for des in self.transitions[src]:
if self.result[des]['x'] - self.result[src]['x'] == 0:
self.result[src]['x'] += 0.08
self.result[des]['x'] -= 0.08
factorX = maxx + nheight
factorY = maxy + nwidth
for node in self.result:
self.result[node]['y'] = (self.result[node]['y']) * factorX + margin
self.result[node]['x'] = (self.result[node]['x']) * factorY + margin
def result_get(self):
return self.result
if __name__=='__main__':
starting_node = ['profile'] # put here nodes with flow_start=True
nodes = ['project','account','hr','base','product','mrp','test','profile']
transitions = [
('profile','mrp'),
('mrp','project'),
('project','product'),
('mrp','hr'),
('mrp','test'),
('project','account'),
('project','hr'),
('product','base'),
('account','product'),
('account','test'),
('account','base'),
('hr','base'),
('test','base')
]
radius = 20
g = graph(nodes, transitions)
g.process(starting_node)
g.scale(radius*3,radius*3, radius, radius)
from PIL import Image
from PIL import ImageDraw
img = Image.new("RGB", (800, 600), "#ffffff")
draw = ImageDraw.Draw(img)
result = g.result_get()
node_res = {}
for node in nodes:
node_res[node] = result[node]
for name,node in node_res.items():
draw.arc( (int(node['y']-radius), int(node['x']-radius),int(node['y']+radius), int(node['x']+radius) ), 0, 360, (128,128,128))
draw.text( (int(node['y']), int(node['x'])), str(name), (128,128,128))
for t in transitions:
draw.line( (int(node_res[t[0]]['y']), int(node_res[t[0]]['x']),int(node_res[t[1]]['y']),int(node_res[t[1]]['x'])),(128,128,128) )
img.save("graph.png", "PNG")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
geneanet/moneta | moneta/http/server.py | 1 | 6949 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from gevent.server import StreamServer
from gevent import sleep
import logging
from fcntl import fcntl, F_GETFD, F_SETFD, FD_CLOEXEC
from collections import OrderedDict
import re
import json
import traceback
from moneta.http.http import *
logger = logging.getLogger('moneta.http.server')
class HTTPServer(object):
allowed_methods = [
        'OPTIONS',
'GET',
'HEAD',
'POST',
'PUT',
'DELETE',
'TRACE',
'CONNECT'
]
def __init__(self, address, request_log_level = logging.INFO):
self.address = address
self.server = None
self.routes = OrderedDict()
self.request_log_level = request_log_level
@staticmethod
def _set_cloexec(socket):
""" Set the CLOEXEC attribute on a file descriptor """
flags = fcntl(socket, F_GETFD)
fcntl(socket, F_SETFD, flags | FD_CLOEXEC)
def run(self):
logger.debug("Listening on %s:%d", *self.address)
self.server = StreamServer(self.address, self.handle_connection)
self.server.start()
self._set_cloexec(self.server.socket)
def run_forever(self):
self.run()
while True:
sleep(30)
def handle_connection(self, socket, address):
        self._set_cloexec(socket)  # mark the accepted client socket, not the listener
logger.debug("[%s:%d] Incoming connection", *address)
fp = socket.makefile()
keepalive = True
while keepalive:
# Query
while True:
line = fp.readline()
if not line:
logger.debug("[%s:%d] Connection closed", *address)
return
else:
line = line.strip()
if line:
break
elements = line.split(" ")
if len(elements) < 2 or len(elements) > 3 or not elements[1] or elements[0] not in self.allowed_methods:
http_send_reply(socket, HTTPReply(code = 400, body = "<h1>Bad request</h1><pre>%s</pre>" % line))
logger.error("Unable to parse request [%s]", line)
continue
else:
method = elements[0]
uri = elements[1]
if len(elements) == 3:
version = elements[2]
else:
version = None
# Headers
headers = {}
while True:
line = fp.readline()
if not line:
logger.debug("[%s:%d] Connection closed", *address)
return
else:
line = line.strip()
if line == "":
break
try:
(header, data) = line.split(':', 1)
header = header.strip()
data = data.strip()
headers[header] = data
except Exception:
http_send_reply(socket, HTTPReply(code = 400, body = "<h1>Unable to parse header line</h1><pre>%s</pre>" % line))
logger.warning("Unable to parse header line [%s]", line)
continue
# Body
body = ""
if http_has_header(headers, 'content-length'):
try:
bodylength = int(http_get_header(headers, 'content-length'))
except Exception:
http_send_reply(socket, HTTPReply(code = 400, body = "<h1>Unable to parse content-length header</h1>"))
logger.warning("Unable to parse content-length header [%s]", http_get_header(headers, 'content-length'))
continue
body = fp.read(bodylength)
# Processing
logger.debug("[%s] Processing request %s %s", repr(address), method, uri)
try:
request = HTTPRequest(method, uri, version, headers, body)
reply = self.handle_request(socket, address, request)
except BaseException:
reply = HTTPReply(code = 500)
raise
finally:
if self.request_log_level:
logger.log(self.request_log_level, "[%s] Processed request %s %s. Return code %d.", repr(address), method, uri, reply.code)
http_send_reply(socket, reply)
# Keep-alive
if http_has_header(headers, 'connection'):
if http_match_header(headers, 'connection', 'keep-alive'):
keepalive = True
elif http_match_header(headers, 'connection', 'close'):
keepalive = False
elif version == "HTTP/1.0":
keepalive = False
else:
keepalive = True
socket.close()
logger.debug("[%s:%d] Connection closed", *address)
def register_route(self, route, controller, methods = "GET"):
""" Register a function to generate response for an HTTP query """
if not hasattr(controller, '__call__'):
raise TypeError("Controller must be callable")
if isinstance(methods, (str, unicode)):
methods = { methods }
if not isinstance(methods, set):
raise TypeError('Methods must be a string or a set')
try:
regex = re.compile("^%s$" % route)
        except Exception as e:
            raise ValueError('Unable to compile regex for route {0}: {1}'.format(route, e))
for method in methods:
logger.debug("Registering method %s for route %s", method, route)
self.routes[(route, method)] = {
'controller': controller,
'regex': regex
}
def handle_request(self, socket, address, request):
"""Handle a HTTP request, finding the right route"""
# Fold multiple / in URL
request.uri_path = re.sub(r'/+', r'/', request.uri_path)
try:
reply = HTTPReply(code = 404)
for ((route, method), routeconfig) in self.routes.iteritems():
match = routeconfig['regex'].match(request.uri_path)
if match:
if request.method == method:
logger.debug('Matched route %s method %s', route, method)
reply = routeconfig['controller'](request)
break
else:
reply = HTTPReply(code = 405)
except Exception, e:
logger.exception("Caught exception while handling request %s %s", request.method, request.uri)
reply = HTTPReply(code = 500, body = json.dumps({"error": True, "message": repr(e), "traceback": traceback.format_exc()}), headers = { 'Content-Type': 'application/javascript' })
return reply
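# Usage sketch (handler, address and port are illustrative):
#
#   def status(request):
#       return HTTPReply(code=200, body='OK')
#
#   server = HTTPServer(('127.0.0.1', 8080))
#   server.register_route('/status', status, methods='GET')
#   server.run_forever()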
| bsd-3-clause |
Nexenta/cinder | cinder/volume/drivers/falconstor/iscsi.py | 5 | 3928 | # Copyright (c) 2016 FalconStor, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Volume driver for FalconStor FSS storage system.
This driver requires FSS-8.00-8865 or later.
"""
from cinder import interface
import cinder.volume.driver
from cinder.volume.drivers.falconstor import fss_common
DEFAULT_ISCSI_PORT = 3260
@interface.volumedriver
class FSSISCSIDriver(fss_common.FalconstorBaseDriver,
cinder.volume.driver.ISCSIDriver):
"""Implements commands for FalconStor FSS ISCSI management.
To enable the driver add the following line to the cinder configuration:
volume_driver=cinder.volume.drivers.falconstor.iscsi.FSSISCSIDriver
Version history:
1.0.0 - Initial driver
1.0.1 - Fix copy_image_to_volume error.
1.0.2 - Closes-Bug #1554184, add lun id type conversion in
initialize_connection
        1.0.3 - Merged source code
        1.0.4 - Fixed create_volume_from_snapshot(), create_cloned_volume()
                metadata TypeError
        2.0.0 - Mitaka driver
                -- Fixed consistency group commands error
        2.0.1 - Fixed bugs
        2.0.2 - Added multipath support
3.0.0 - Newton driver
"""
VERSION = '3.0.0'
# ThirdPartySystems wiki page
CI_WIKI_NAME = "FalconStor_CI"
def __init__(self, *args, **kwargs):
super(FSSISCSIDriver, self).__init__(*args, **kwargs)
self._storage_protocol = "iSCSI"
self._backend_name = (
self.configuration.safe_get('volume_backend_name') or
self.__class__.__name__)
def initialize_connection(self, volume, connector, initiator_data=None):
fss_hosts = []
target_portal = []
multipath = connector.get('multipath', False)
fss_hosts.append(self.configuration.san_ip)
if multipath:
if self._check_multipath():
fss_hosts.append(self.configuration.san_secondary_ip)
else:
multipath = False
for host in fss_hosts:
iscsi_ip_port = "%s:%d" % (host, DEFAULT_ISCSI_PORT)
target_portal.append(iscsi_ip_port)
target_info = self.proxy.initialize_connection_iscsi(volume,
connector,
fss_hosts)
properties = {}
properties['target_discovered'] = True
properties['discard'] = True
properties['encrypted'] = False
properties['qos_specs'] = None
properties['access_mode'] = 'rw'
properties['volume_id'] = volume['id']
properties['target_iqn'] = target_info['iqn']
properties['target_portal'] = target_portal[0]
properties['target_lun'] = int(target_info['lun'])
if multipath:
properties['target_iqns'] = [target_info['iqn'],
target_info['iqn']]
properties['target_portals'] = target_portal
properties['target_luns'] = [int(target_info['lun']),
int(target_info['lun'])]
return {'driver_volume_type': 'iscsi', 'data': properties}
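    # Sketch of the single-path return value (all values illustrative):
    #   {'driver_volume_type': 'iscsi',
    #    'data': {'target_portal': '10.0.0.1:3260',
    #             'target_iqn': 'iqn.2000-03.com.falconstor:vol-1',
    #             'target_lun': 0, 'access_mode': 'rw', ...}}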
def terminate_connection(self, volume, connector, **kwargs):
"""Terminate connection."""
self.proxy.terminate_connection_iscsi(volume, connector)
| apache-2.0 |
mfherbst/spack | lib/spack/spack/util/spack_yaml.py | 2 | 12644 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""Enhanced YAML parsing for Spack.
- ``load()`` preserves YAML Marks on returned objects -- this allows
us to access file and line information later.
- Our load methods use the ``OrderedDict`` class instead of YAML's
  default unordered dict.
"""
from ordereddict_backport import OrderedDict
from six import string_types, StringIO
import yaml
from yaml import Loader, Dumper
from yaml.nodes import MappingNode, SequenceNode, ScalarNode
from yaml.constructor import ConstructorError
from llnl.util.tty.color import colorize, clen, cextra
import spack.error
# Only export load and dump
__all__ = ['load', 'dump', 'SpackYAMLError']
# Make new classes so we can add custom attributes.
# Also, use OrderedDict instead of just dict.
class syaml_dict(OrderedDict):
def __repr__(self):
mappings = ('%r: %r' % (k, v) for k, v in self.items())
return '{%s}' % ', '.join(mappings)
class syaml_list(list):
__repr__ = list.__repr__
class syaml_str(str):
__repr__ = str.__repr__
class syaml_int(int):
__repr__ = str.__repr__
#: mapping from syaml type -> primitive type
syaml_types = {
syaml_str: string_types,
syaml_int: int,
syaml_dict: dict,
syaml_list: list,
}
def syaml_type(obj):
"""Get the corresponding syaml wrapper type for a primitive type.
Return:
(object): syaml-typed copy of object, or the obj if no wrapper
"""
for syaml_t, t in syaml_types.items():
if type(obj) is not bool and isinstance(obj, t):
return syaml_t(obj) if type(obj) != syaml_t else obj
return obj
def markable(obj):
"""Whether an object can be marked."""
return type(obj) in syaml_types
def mark(obj, node):
"""Add start and end markers to an object."""
if not markable(obj):
return
if hasattr(node, 'start_mark'):
obj._start_mark = node.start_mark
elif hasattr(node, '_start_mark'):
obj._start_mark = node._start_mark
if hasattr(node, 'end_mark'):
obj._end_mark = node.end_mark
elif hasattr(node, '_end_mark'):
obj._end_mark = node._end_mark
def marked(obj):
"""Whether an object has been marked by spack_yaml."""
return (hasattr(obj, '_start_mark') and obj._start_mark or
hasattr(obj, '_end_mark') and obj._end_mark)
class OrderedLineLoader(Loader):
"""YAML loader that preserves order and line numbers.
Mappings read in by this loader behave like an ordered dict.
Sequences, mappings, and strings also have new attributes,
``_start_mark`` and ``_end_mark``, that preserve YAML line
information in the output data.
"""
#
# Override construct_yaml_* so that they build our derived types,
# which allows us to add new attributes to them.
#
# The standard YAML constructors return empty instances and fill
# in with mappings later. We preserve this behavior.
#
def construct_yaml_str(self, node):
value = self.construct_scalar(node)
value = syaml_str(value)
mark(value, node)
return value
def construct_yaml_seq(self, node):
data = syaml_list()
mark(data, node)
yield data
data.extend(self.construct_sequence(node))
def construct_yaml_map(self, node):
data = syaml_dict()
mark(data, node)
yield data
value = self.construct_mapping(node)
data.update(value)
#
# Override the ``construct_*`` routines. These fill in empty
# objects after yielded by the above ``construct_yaml_*`` methods.
#
def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
raise ConstructorError(
None, None,
"expected a sequence node, but found %s" % node.id,
node.start_mark)
value = syaml_list(self.construct_object(child, deep=deep)
for child in node.value)
mark(value, node)
return value
def construct_mapping(self, node, deep=False):
"""Store mappings as OrderedDicts instead of as regular python
dictionaries to preserve file ordering."""
if not isinstance(node, MappingNode):
raise ConstructorError(
None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
mapping = syaml_dict()
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError as exc:
raise ConstructorError(
"while constructing a mapping", node.start_mark,
"found unacceptable key (%s)" % exc, key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
if key in mapping:
raise ConstructorError(
"while constructing a mapping", node.start_mark,
"found already in-use key (%s)" % key, key_node.start_mark)
mapping[key] = value
mark(mapping, node)
return mapping
# register above new constructors
OrderedLineLoader.add_constructor(
'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map)
OrderedLineLoader.add_constructor(
'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq)
OrderedLineLoader.add_constructor(
'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str)
class OrderedLineDumper(Dumper):
"""Dumper that preserves ordering and formats ``syaml_*`` objects.
This dumper preserves insertion ordering ``syaml_dict`` objects
when they're written out. It also has some custom formatters
for ``syaml_*`` objects so that they are formatted like their
regular Python equivalents, instead of ugly YAML pyobjects.
"""
def represent_mapping(self, tag, mapping, flow_style=None):
value = []
node = MappingNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
if hasattr(mapping, 'items'):
# if it's a syaml_dict, preserve OrderedDict order.
# Otherwise do the default thing.
sort = not isinstance(mapping, syaml_dict)
mapping = list(mapping.items())
if sort:
mapping.sort()
for item_key, item_value in mapping:
node_key = self.represent_data(item_key)
node_value = self.represent_data(item_value)
if not (isinstance(node_key, ScalarNode) and not node_key.style):
best_style = False
if not (isinstance(node_value, ScalarNode) and
not node_value.style):
best_style = False
value.append((node_key, node_value))
if flow_style is None:
if self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
def ignore_aliases(self, _data):
"""Make the dumper NEVER print YAML aliases."""
return True
# Make our special objects look like normal YAML ones.
OrderedLineDumper.add_representer(syaml_dict, OrderedLineDumper.represent_dict)
OrderedLineDumper.add_representer(syaml_list, OrderedLineDumper.represent_list)
OrderedLineDumper.add_representer(syaml_str, OrderedLineDumper.represent_str)
OrderedLineDumper.add_representer(syaml_int, OrderedLineDumper.represent_int)
def file_line(mark):
"""Format a mark as <file>:<line> information."""
result = mark.name
if mark.line:
result += ':' + str(mark.line)
return result
#: Global for interactions between LineAnnotationDumper and dump_annotated().
#: This is nasty but YAML doesn't give us many ways to pass arguments --
#: yaml.dump() takes a class (not an instance) and instantiates the dumper
#: itself, so we can't just pass an instance
_annotations = []
class LineAnnotationDumper(OrderedLineDumper):
"""Dumper that generates per-line annotations.
Annotations are stored in the ``_annotations`` global. After one
dump pass, the strings in ``_annotations`` will correspond one-to-one
with the lines output by the dumper.
LineAnnotationDumper records blame information after each line is
generated. As each line is parsed, it saves file/line info for each
object printed. At the end of each line, it creates an annotation
based on the saved mark and stores it in ``_annotations``.
For an example of how to use this, see ``dump_annotated()``, which
writes to a ``StringIO`` then joins the lines from that with
annotations.
"""
saved = None
def __init__(self, *args, **kwargs):
super(LineAnnotationDumper, self).__init__(*args, **kwargs)
del _annotations[:]
def process_scalar(self):
super(LineAnnotationDumper, self).process_scalar()
if marked(self.event.value):
self.saved = self.event.value
def represent_data(self, data):
"""Force syaml_str to be passed through with marks."""
result = super(LineAnnotationDumper, self).represent_data(data)
if isinstance(result.value, string_types):
result.value = syaml_str(data)
mark(result.value, data)
return result
def write_stream_start(self):
super(LineAnnotationDumper, self).write_stream_start()
_annotations.append(colorize('@K{---}'))
def write_line_break(self):
super(LineAnnotationDumper, self).write_line_break()
        # nothing was marked on this line, so there is no annotation to add
        if not self.saved:
            return
        # append the saved mark's annotation at the end of the line
        mark = self.saved._start_mark
        ann = '@K{%s}' % mark.name
        if mark.line is not None:
            ann += ':@c{%s}' % (mark.line + 1)
        _annotations.append(colorize(ann))
def load(*args, **kwargs):
"""Load but modify the loader instance so that it will add __line__
    attributes to the returned object."""
kwargs['Loader'] = OrderedLineLoader
return yaml.load(*args, **kwargs)
def dump(*args, **kwargs):
blame = kwargs.pop('blame', False)
if blame:
return dump_annotated(*args, **kwargs)
else:
kwargs['Dumper'] = OrderedLineDumper
return yaml.dump(*args, **kwargs)
def dump_annotated(data, stream=None, *args, **kwargs):
kwargs['Dumper'] = LineAnnotationDumper
sio = StringIO()
yaml.dump(data, sio, *args, **kwargs)
lines = sio.getvalue().rstrip().split('\n')
getvalue = None
if stream is None:
stream = StringIO()
getvalue = stream.getvalue
    # write out annotations and lines, accounting for color
width = max(clen(a) for a in _annotations)
formats = ['%%-%ds %%s\n' % (width + cextra(a)) for a in _annotations]
for f, a, l in zip(formats, _annotations, lines):
stream.write(f % (a, l))
if getvalue:
return getvalue()
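# A minimal usage sketch (not part of the original module) showing how
# ``load`` and ``dump(blame=True)`` fit together; the file name is made up.
def _example_annotated_roundtrip():
    """Illustrative only: load YAML with line marks, dump with blame info."""
    with open('packages.yaml') as f:      # hypothetical input file
        data = load(f)                    # loader attaches file/line marks
    # each emitted line is prefixed with its originating <file>:<line>
    return dump(data, blame=True)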
class SpackYAMLError(spack.error.SpackError):
"""Raised when there are issues with YAML parsing."""
def __init__(self, msg, yaml_error):
super(SpackYAMLError, self).__init__(msg, str(yaml_error))
| lgpl-2.1 |
jciskey/pygraph | pygraph/helpers/functions.py | 1 | 6839 | """Gathers together a collection of helper functions and classes that the library needs,
but end users won't care about."""
import copy
from ..classes import UndirectedGraph, DirectedGraph
# Graph Conversions
def make_subgraph(graph, vertices, edges):
"""Converts a subgraph given by a list of vertices and edges into a graph object."""
# Copy the entire graph
local_graph = copy.deepcopy(graph)
# Remove all the edges that aren't in the list
edges_to_delete = [x for x in local_graph.get_all_edge_ids() if x not in edges]
for e in edges_to_delete:
local_graph.delete_edge_by_id(e)
# Remove all the vertices that aren't in the list
nodes_to_delete = [x for x in local_graph.get_all_node_ids() if x not in vertices]
for n in nodes_to_delete:
local_graph.delete_node(n)
return local_graph
def convert_graph_directed_to_undirected(dg):
"""Converts a directed graph into an undirected graph. Directed edges are made undirected."""
udg = UndirectedGraph()
# Copy the graph
# --Copy nodes
# --Copy edges
udg.nodes = copy.deepcopy(dg.nodes)
udg.edges = copy.deepcopy(dg.edges)
udg.next_node_id = dg.next_node_id
udg.next_edge_id = dg.next_edge_id
# Convert the directed edges into undirected edges
for edge_id in udg.get_all_edge_ids():
edge = udg.get_edge(edge_id)
target_node_id = edge['vertices'][1]
target_node = udg.get_node(target_node_id)
target_node['edges'].append(edge_id)
return udg
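# Illustrative sketch (not in the original module): converting a two-node
# directed graph; afterwards the edge is listed on both endpoints.
def _example_convert_directed():
    dg = DirectedGraph()
    a, b = dg.new_node(), dg.new_node()
    dg.new_edge(a, b)
    udg = convert_graph_directed_to_undirected(dg)
    return udg.get_node(b)['edges']   # now includes the converted edge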
def remove_duplicate_edges_directed(dg):
"""Removes duplicate edges from a directed graph."""
# With directed edges, we can just hash the to and from node id tuples and if
# a node happens to conflict with one that already exists, we delete it
    # --For aesthetics, we sort the edge ids so that lower edge ids are kept
lookup = {}
edges = sorted(dg.get_all_edge_ids())
for edge_id in edges:
e = dg.get_edge(edge_id)
tpl = e['vertices']
if tpl in lookup:
dg.delete_edge_by_id(edge_id)
else:
lookup[tpl] = edge_id
def remove_duplicate_edges_undirected(udg):
"""Removes duplicate edges from an undirected graph."""
# With undirected edges, we need to hash both combinations of the to-from node ids, since a-b and b-a are equivalent
    # --For aesthetics, we sort the edge ids so that lower edge ids are kept
lookup = {}
edges = sorted(udg.get_all_edge_ids())
for edge_id in edges:
e = udg.get_edge(edge_id)
tpl_a = e['vertices']
tpl_b = (tpl_a[1], tpl_a[0])
if tpl_a in lookup or tpl_b in lookup:
udg.delete_edge_by_id(edge_id)
else:
lookup[tpl_a] = edge_id
lookup[tpl_b] = edge_id
def get_vertices_from_edge_list(graph, edge_list):
"""Transforms a list of edges into a list of the nodes those edges connect.
Returns a list of nodes, or an empty list if given an empty list.
"""
node_set = set()
for edge_id in edge_list:
edge = graph.get_edge(edge_id)
a, b = edge['vertices']
node_set.add(a)
node_set.add(b)
return list(node_set)
def get_subgraph_from_edge_list(graph, edge_list):
"""Transforms a list of edges into a subgraph."""
node_list = get_vertices_from_edge_list(graph, edge_list)
subgraph = make_subgraph(graph, node_list, edge_list)
return subgraph
def merge_graphs(main_graph, addition_graph):
"""Merges an ''addition_graph'' into the ''main_graph''.
Returns a tuple of dictionaries, mapping old node ids and edge ids to new ids.
"""
node_mapping = {}
edge_mapping = {}
for node in addition_graph.get_all_node_objects():
node_id = node['id']
new_id = main_graph.new_node()
node_mapping[node_id] = new_id
for edge in addition_graph.get_all_edge_objects():
edge_id = edge['id']
old_vertex_a_id, old_vertex_b_id = edge['vertices']
new_vertex_a_id = node_mapping[old_vertex_a_id]
new_vertex_b_id = node_mapping[old_vertex_b_id]
new_edge_id = main_graph.new_edge(new_vertex_a_id, new_vertex_b_id)
edge_mapping[edge_id] = new_edge_id
return node_mapping, edge_mapping
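# Illustrative sketch (not in the original module): merging one graph into
# another and translating an old node id through the returned mapping.
def _example_merge():
    main, extra = UndirectedGraph(), UndirectedGraph()
    a, b = extra.new_node(), extra.new_node()
    extra.new_edge(a, b)
    node_mapping, edge_mapping = merge_graphs(main, extra)
    return node_mapping[a]   # id of node ``a`` inside the merged graph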
def create_graph_from_adjacency_matrix(adjacency_matrix):
"""Generates a graph from an adjacency matrix specification.
Returns a tuple containing the graph and a list-mapping of node ids to matrix column indices.
The graph will be an UndirectedGraph if the provided adjacency matrix is symmetric.
The graph will be a DirectedGraph if the provided adjacency matrix is not symmetric.
Ref: http://mathworld.wolfram.com/AdjacencyMatrix.html"""
if is_adjacency_matrix_symmetric(adjacency_matrix):
graph = UndirectedGraph()
else:
graph = DirectedGraph()
node_column_mapping = []
num_columns = len(adjacency_matrix)
for _ in range(num_columns):
node_id = graph.new_node()
node_column_mapping.append(node_id)
for j in range(num_columns):
for i in range(num_columns):
if adjacency_matrix[j][i]:
jnode_id = node_column_mapping[j]
inode_id = node_column_mapping[i]
# Because of our adjacency matrix encoding, [j][i] in our code corresponds to [i][j] in a traditional matrix interpretation
# Thus, we need to put an edge from node i to node j if [j][i] in our code is non-zero
graph.new_edge(inode_id, jnode_id)
return (graph, node_column_mapping)
def is_adjacency_matrix_symmetric(adjacency_matrix):
"""Determines if an adjacency matrix is symmetric.
Ref: http://mathworld.wolfram.com/SymmetricMatrix.html"""
# Verify that the matrix is square
num_columns = len(adjacency_matrix)
for column in adjacency_matrix:
# In a square matrix, every row should be the same length as the number of columns
if len(column) != num_columns:
return False
# Loop through the bottom half of the matrix and compare it to the top half
# --We do the bottom half because of how we construct adjacency matrices
max_i = 0
for j in range(num_columns):
for i in range(max_i):
# If i == j, we can skip ahead so we don't compare with ourself
if i == j:
continue
# Compare the value in the bottom half with the mirrored value in the top half
# If they aren't the same, the matrix isn't symmetric
if adjacency_matrix[j][i] != adjacency_matrix[i][j]:
return False
max_i += 1
# If we reach this far without returning false, then we know that everything matched,
# which makes this a symmetric matrix
return True
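# Illustrative sketch (not in the original module): a symmetric 3x3 adjacency
# matrix yields an UndirectedGraph; breaking the symmetry would yield a
# DirectedGraph instead.
def _example_adjacency():
    matrix = [[0, 1, 0],
              [1, 0, 1],
              [0, 1, 0]]
    assert is_adjacency_matrix_symmetric(matrix)
    graph, node_column_mapping = create_graph_from_adjacency_matrix(matrix)
    return graph, node_column_mapping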
| mit |
felixjimenez/django | django/template/loaders/app_directories.py | 105 | 2354 | """
Wrapper for loading templates from "templates" directories in INSTALLED_APPS
packages.
"""
import os
import sys
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.template.base import TemplateDoesNotExist
from django.template.loader import BaseLoader
from django.utils._os import safe_join
from django.utils.importlib import import_module
from django.utils import six
# At compile time, cache the directories to search.
if six.PY2:
fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
app_template_dirs = []
for app in settings.INSTALLED_APPS:
try:
mod = import_module(app)
except ImportError as e:
raise ImproperlyConfigured('ImportError %s: %s' % (app, e.args[0]))
template_dir = os.path.join(os.path.dirname(mod.__file__), 'templates')
if os.path.isdir(template_dir):
if six.PY2:
template_dir = template_dir.decode(fs_encoding)
app_template_dirs.append(template_dir)
# It won't change, so convert it to a tuple to save memory.
app_template_dirs = tuple(app_template_dirs)
class Loader(BaseLoader):
is_usable = True
def get_template_sources(self, template_name, template_dirs=None):
"""
Returns the absolute paths to "template_name", when appended to each
directory in "template_dirs". Any paths that don't lie inside one of the
template dirs are excluded from the result set, for security reasons.
"""
if not template_dirs:
template_dirs = app_template_dirs
for template_dir in template_dirs:
try:
yield safe_join(template_dir, template_name)
except UnicodeDecodeError:
# The template dir name was a bytestring that wasn't valid UTF-8.
raise
except ValueError:
# The joined path was located outside of template_dir.
pass
def load_template_source(self, template_name, template_dirs=None):
for filepath in self.get_template_sources(template_name, template_dirs):
try:
with open(filepath, 'rb') as fp:
return (fp.read().decode(settings.FILE_CHARSET), filepath)
except IOError:
pass
raise TemplateDoesNotExist(template_name)
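# A hedged usage sketch (not part of Django; assumes settings are configured
# and the template name below is hypothetical): the loader searches every
# installed app's templates/ directory in INSTALLED_APPS order.
def _example_lookup():
    loader = Loader()
    source, origin = loader.load_template_source('myapp/index.html')
    return origin   # absolute path the template source was read from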
| bsd-3-clause |
santosh-surya/pcb-cnc-machine | desktop-application/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py | 1841 | 3207 | #!/usr/bin/env python
# Copyright 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the input.py file."""
import gyp.input
import unittest
import sys
class TestFindCycles(unittest.TestCase):
def setUp(self):
self.nodes = {}
for x in ('a', 'b', 'c', 'd', 'e'):
self.nodes[x] = gyp.input.DependencyGraphNode(x)
def _create_dependency(self, dependent, dependency):
dependent.dependencies.append(dependency)
dependency.dependents.append(dependent)
def test_no_cycle_empty_graph(self):
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_no_cycle_line(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['d'])
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_no_cycle_dag(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['a'], self.nodes['c'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_cycle_self_reference(self):
self._create_dependency(self.nodes['a'], self.nodes['a'])
self.assertEquals([[self.nodes['a'], self.nodes['a']]],
self.nodes['a'].FindCycles())
def test_cycle_two_nodes(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a'])
self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
self.nodes['a'].FindCycles())
self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
self.nodes['b'].FindCycles())
def test_two_cycles(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['b'])
cycles = self.nodes['a'].FindCycles()
self.assertTrue(
[self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
self.assertTrue(
[self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
self.assertEquals(2, len(cycles))
def test_big_cycle(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['d'])
self._create_dependency(self.nodes['d'], self.nodes['e'])
self._create_dependency(self.nodes['e'], self.nodes['a'])
self.assertEquals([[self.nodes['a'],
self.nodes['b'],
self.nodes['c'],
self.nodes['d'],
self.nodes['e'],
self.nodes['a']]],
self.nodes['a'].FindCycles())
if __name__ == '__main__':
unittest.main()
| gpl-3.0 |
hWorblehat/smfs | googletest/googletest/test/gtest_catch_exceptions_test.py | 2139 | 9901 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests Google Test's exception catching behavior.
This script invokes gtest_catch_exceptions_test_ and
gtest_catch_exceptions_ex_test_ (programs written with
Google Test) and verifies their output.
"""
__author__ = 'vladl@google.com (Vlad Losev)'
import os
import gtest_test_utils
# Constants.
FLAG_PREFIX = '--gtest_'
LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
NO_CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions=0'
FILTER_FLAG = FLAG_PREFIX + 'filter'
# Path to the gtest_catch_exceptions_ex_test_ binary, compiled with
# exceptions enabled.
EX_EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_catch_exceptions_ex_test_')
# Path to the gtest_catch_exceptions_test_ binary, compiled with
# exceptions disabled.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_catch_exceptions_no_ex_test_')
environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar
# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely. Therefore they are incompatible with
# the premature-exit-file protocol by design. Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
TEST_LIST = gtest_test_utils.Subprocess(
[EXE_PATH, LIST_TESTS_FLAG], env=environ).output
SUPPORTS_SEH_EXCEPTIONS = 'ThrowsSehException' in TEST_LIST
if SUPPORTS_SEH_EXCEPTIONS:
BINARY_OUTPUT = gtest_test_utils.Subprocess([EXE_PATH], env=environ).output
EX_BINARY_OUTPUT = gtest_test_utils.Subprocess(
[EX_EXE_PATH], env=environ).output
# The tests.
if SUPPORTS_SEH_EXCEPTIONS:
# pylint:disable-msg=C6302
class CatchSehExceptionsTest(gtest_test_utils.TestCase):
"""Tests exception-catching behavior."""
def TestSehExceptions(self, test_output):
self.assert_('SEH exception with code 0x2a thrown '
'in the test fixture\'s constructor'
in test_output)
self.assert_('SEH exception with code 0x2a thrown '
'in the test fixture\'s destructor'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in SetUpTestCase()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in TearDownTestCase()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in SetUp()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in TearDown()'
in test_output)
self.assert_('SEH exception with code 0x2a thrown in the test body'
in test_output)
def testCatchesSehExceptionsWithCxxExceptionsEnabled(self):
self.TestSehExceptions(EX_BINARY_OUTPUT)
def testCatchesSehExceptionsWithCxxExceptionsDisabled(self):
self.TestSehExceptions(BINARY_OUTPUT)
class CatchCxxExceptionsTest(gtest_test_utils.TestCase):
"""Tests C++ exception-catching behavior.
Tests in this test case verify that:
* C++ exceptions are caught and logged as C++ (not SEH) exceptions
  * Exceptions thrown affect the remainder of the test work flow in the
expected manner.
"""
def testCatchesCxxExceptionsInFixtureConstructor(self):
self.assert_('C++ exception with description '
'"Standard C++ exception" thrown '
'in the test fixture\'s constructor'
in EX_BINARY_OUTPUT)
self.assert_('unexpected' not in EX_BINARY_OUTPUT,
'This failure belongs in this test only if '
'"CxxExceptionInConstructorTest" (no quotes) '
'appears on the same line as words "called unexpectedly"')
if ('CxxExceptionInDestructorTest.ThrowsExceptionInDestructor' in
EX_BINARY_OUTPUT):
def testCatchesCxxExceptionsInFixtureDestructor(self):
self.assert_('C++ exception with description '
'"Standard C++ exception" thrown '
'in the test fixture\'s destructor'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInDestructorTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInSetUpTestCase(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in SetUpTestCase()'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInConstructorTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest constructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest::SetUp() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest::TearDown() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTestCaseTest test body '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInTearDownTestCase(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in TearDownTestCase()'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInSetUp(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in SetUp()'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInSetUpTest::TearDown() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('unexpected' not in EX_BINARY_OUTPUT,
'This failure belongs in this test only if '
'"CxxExceptionInSetUpTest" (no quotes) '
'appears on the same line as words "called unexpectedly"')
def testCatchesCxxExceptionsInTearDown(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in TearDown()'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTearDownTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTearDownTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesCxxExceptionsInTestBody(self):
self.assert_('C++ exception with description "Standard C++ exception"'
' thrown in the test body'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTestBodyTest::TearDownTestCase() '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTestBodyTest destructor '
'called as expected.'
in EX_BINARY_OUTPUT)
self.assert_('CxxExceptionInTestBodyTest::TearDown() '
'called as expected.'
in EX_BINARY_OUTPUT)
def testCatchesNonStdCxxExceptions(self):
self.assert_('Unknown C++ exception thrown in the test body'
in EX_BINARY_OUTPUT)
def testUnhandledCxxExceptionsAbortTheProgram(self):
# Filters out SEH exception tests on Windows. Unhandled SEH exceptions
# cause tests to show pop-up windows there.
    FILTER_OUT_SEH_TESTS_FLAG = FILTER_FLAG + '=-*Seh*'
# By default, Google Test doesn't catch the exceptions.
uncaught_exceptions_ex_binary_output = gtest_test_utils.Subprocess(
[EX_EXE_PATH,
NO_CATCH_EXCEPTIONS_FLAG,
         FILTER_OUT_SEH_TESTS_FLAG],
env=environ).output
self.assert_('Unhandled C++ exception terminating the program'
in uncaught_exceptions_ex_binary_output)
self.assert_('unexpected' not in uncaught_exceptions_ex_binary_output)
if __name__ == '__main__':
gtest_test_utils.Main()
| gpl-2.0 |
aldevigi/ursula | plugins/filters/ursula_filters.py | 17 | 1147 | import os
def ursula_controller_ips(hostvars, groups, controller_name='controller'):
controller_ips = set()
for host in groups[controller_name]:
controller_primary_interface = hostvars[host]['primary_interface']
ip = hostvars[host][controller_primary_interface]['ipv4']['address']
controller_ips.add(ip)
return sorted(list(controller_ips))
def ursula_memcache_hosts(hostvars, groups, memcache_port, controller_name='controller'):
controller_ips = ursula_controller_ips(hostvars, groups, controller_name)
host_strings = ['%s:%s' % (c, memcache_port) for c in controller_ips]
return ','.join(host_strings)
def ursula_package_path(project, version):
package_name_format = "openstack-%(version)s" % locals()
path = os.path.join("/opt/bbc", package_name_format, project)
return path
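# Illustrative only (not in the original plugin): once Ansible registers the
# FilterModule below, these become Jinja filters in templates/playbooks, e.g.
#   {{ hostvars | ursula_memcache_hosts(groups, 11211) }}
#       -> "10.0.0.1:11211,10.0.0.2:11211"   (addresses made up)
#   {{ 'nova' | ursula_package_path('2014.1') }}
#       -> "/opt/bbc/openstack-2014.1/nova"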
class FilterModule(object):
''' ursula utility filters '''
def filters(self):
return {
'ursula_controller_ips': ursula_controller_ips,
'ursula_memcache_hosts': ursula_memcache_hosts,
'ursula_package_path': ursula_package_path,
}
| mit |
liuqr/edx-xiaodun | lms/djangoapps/wechat/courses.py | 7 | 11260 | from collections import defaultdict
from fs.errors import ResourceNotFoundError
import logging
import inspect
from path import path
from django.http import Http404
from django.conf import settings
from .module_render import get_module
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore import Location, XML_MODULESTORE_TYPE
from xmodule.modulestore.django import modulestore, loc_mapper
from xmodule.contentstore.content import StaticContent
from xmodule.modulestore.exceptions import ItemNotFoundError, InvalidLocationError
from courseware.model_data import FieldDataCache
from static_replace import replace_static_urls
from courseware.access import has_access
import branding
log = logging.getLogger(__name__)
def get_request_for_thread():
"""Walk up the stack, return the nearest first argument named "request"."""
frame = None
try:
for f in inspect.stack()[1:]:
frame = f[0]
code = frame.f_code
if code.co_varnames[:1] == ("request",):
return frame.f_locals["request"]
elif code.co_varnames[:2] == ("self", "request",):
return frame.f_locals["request"]
finally:
del frame
def get_course(course_id, depth=0):
"""
Given a course id, return the corresponding course descriptor.
If course_id is not valid, raises a ValueError. This is appropriate
for internal use.
depth: The number of levels of children for the modulestore to cache.
None means infinite depth. Default is to fetch no children.
"""
try:
course_loc = CourseDescriptor.id_to_location(course_id)
return modulestore().get_instance(course_id, course_loc, depth=depth)
except (KeyError, ItemNotFoundError):
raise ValueError("Course not found: {}".format(course_id))
except InvalidLocationError:
raise ValueError("Invalid location: {}".format(course_id))
def get_course_by_id(course_id, depth=0):
"""
Given a course id, return the corresponding course descriptor.
If course_id is not valid, raises a 404.
depth: The number of levels of children for the modulestore to cache. None means infinite depth
"""
try:
course_loc = CourseDescriptor.id_to_location(course_id)
return modulestore().get_instance(course_id, course_loc, depth=depth)
except (KeyError, ItemNotFoundError):
raise Http404("Course not found.")
except InvalidLocationError:
raise Http404("Invalid location")
def get_course_with_access(user, course_id, action, depth=0):
"""
Given a course_id, look up the corresponding course descriptor,
check that the user has the access to perform the specified action
on the course, and return the descriptor.
Raises a 404 if the course_id is invalid, or the user doesn't have access.
depth: The number of levels of children for the modulestore to cache. None means infinite depth
"""
course = get_course_by_id(course_id, depth=depth)
if not has_access(user, course, action):
# Deliberately return a non-specific error message to avoid
# leaking info about access control settings
raise Http404("Course not found.")
return course
def get_opt_course_with_access(user, course_id, action):
"""
Same as get_course_with_access, except that if course_id is None,
return None without performing any access checks.
"""
if course_id is None:
return None
return get_course_with_access(user, course_id, action)
def course_image_url(course):
"""Try to look up the image url for the course. If it's not found,
log an error and return the dead link"""
if course.static_asset_path or modulestore().get_modulestore_type(course.location.course_id) == XML_MODULESTORE_TYPE:
return '/static/' + (course.static_asset_path or getattr(course, 'data_dir', '')) + "/images/course_image.jpg"
else:
loc = StaticContent.compute_location(course.location.org, course.location.course, course.course_image)
_path = StaticContent.get_url_path_from_location(loc)
return _path
def find_file(filesystem, dirs, filename):
"""
Looks for a filename in a list of dirs on a filesystem, in the specified order.
filesystem: an OSFS filesystem
dirs: a list of path objects
filename: a string
Returns d / filename if found in dir d, else raises ResourceNotFoundError.
"""
for directory in dirs:
filepath = path(directory) / filename
if filesystem.exists(filepath):
return filepath
raise ResourceNotFoundError("Could not find {0}".format(filename))
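# Illustrative only (not in the original module): get_course_syllabus_section
# below prefers a run-specific file via, e.g. (the run name is made up),
#   find_file(fs, [path("syllabus/2014_Fall"), path("syllabus")], "syllabus.html")
# which returns the first candidate that exists on the filesystem.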
def get_course_about_section(course, section_key):
"""
This returns the snippet of html to be rendered on the course about page,
given the key for the section.
Valid keys:
- overview
- title
- university
- number
- short_description
- description
- key_dates (includes start, end, exams, etc)
- video
- course_staff_short
- course_staff_extended
- requirements
- syllabus
- textbook
- faq
- more_info
- ocw_links
"""
# Many of these are stored as html files instead of some semantic
    # markup. This can change without affecting this interface when we find a
# good format for defining so many snippets of text/html.
# TODO: Remove number, instructors from this list
if section_key in ['short_description', 'description', 'key_dates', 'video',
'course_staff_short', 'course_staff_extended',
'requirements', 'syllabus', 'textbook', 'faq', 'more_info',
'number', 'instructors', 'overview',
'effort', 'end_date', 'prerequisites', 'ocw_links']:
try:
request = get_request_for_thread()
loc = course.location.replace(category='about', name=section_key)
# Use an empty cache
field_data_cache = FieldDataCache([], course.id, request.user)
about_module = get_module(
request.user,
request,
loc,
field_data_cache,
course.id,
not_found_ok=True,
wrap_xmodule_display=False,
static_asset_path=course.static_asset_path
)
html = ''
if about_module is not None:
html = about_module.render('student_view').content
return html
except ItemNotFoundError:
log.warning("Missing about section {key} in course {url}".format(
key=section_key, url=course.location.url()))
return None
elif section_key == "title":
return course.display_name_with_default
elif section_key == "university":
return course.display_org_with_default
elif section_key == "number":
return course.display_number_with_default
raise KeyError("Invalid about key " + str(section_key))
def get_course_info_section(request, course, section_key):
"""
This returns the snippet of html to be rendered on the course info page,
given the key for the section.
Valid keys:
- handouts
- guest_handouts
- updates
- guest_updates
"""
loc = Location(course.location.tag, course.location.org, course.location.course, 'course_info', section_key)
# Use an empty cache
field_data_cache = FieldDataCache([], course.id, request.user)
info_module = get_module(
request.user,
request,
loc,
field_data_cache,
course.id,
wrap_xmodule_display=False,
static_asset_path=course.static_asset_path
)
html = ''
if info_module is not None:
html = info_module.render('student_view').content
return html
# TODO: Fix this such that these are pulled in as extra course-specific tabs.
# arjun will address this by the end of October if no one does so prior to
# then.
def get_course_syllabus_section(course, section_key):
"""
This returns the snippet of html to be rendered on the syllabus page,
given the key for the section.
Valid keys:
- syllabus
- guest_syllabus
"""
# Many of these are stored as html files instead of some semantic
    # markup. This can change without affecting this interface when we find a
# good format for defining so many snippets of text/html.
if section_key in ['syllabus', 'guest_syllabus']:
try:
filesys = course.system.resources_fs
# first look for a run-specific version
dirs = [path("syllabus") / course.url_name, path("syllabus")]
filepath = find_file(filesys, dirs, section_key + ".html")
with filesys.open(filepath) as html_file:
return replace_static_urls(
html_file.read().decode('utf-8'),
getattr(course, 'data_dir', None),
course_id=course.location.course_id,
static_asset_path=course.static_asset_path,
)
except ResourceNotFoundError:
log.exception("Missing syllabus section {key} in course {url}".format(
key=section_key, url=course.location.url()))
return "! Syllabus missing !"
raise KeyError("Invalid about key " + str(section_key))
def get_courses_by_university(user, domain=None):
'''
Returns dict of lists of courses available, keyed by course.org (ie university).
Courses are sorted by course.number.
'''
# TODO: Clean up how 'error' is done.
# filter out any courses that errored.
visible_courses = get_courses(user, domain)
universities = defaultdict(list)
for course in visible_courses:
universities[course.org].append(course)
return universities
def get_courses(user, domain=None):
'''
Returns a list of courses available, sorted by course.number
'''
courses = branding.get_visible_courses()
courses = [c for c in courses if has_access(user, c, 'see_exists')]
courses = sorted(courses, key=lambda course: course.number)
return courses
def get_courses_by_search(domain=None):
'''
Returns a list of courses available, sorted by course.number
'''
courses = branding.get_visible_courses()
# courses = [c for c in courses if has_access(user, c, 'see_exists')]
courses = sorted(courses, key=lambda course: course.number)
return courses
def sort_by_announcement(courses):
"""
Sorts a list of courses by their announcement date. If the date is
not available, sort them by their start date.
"""
    # Sort courses by how far they are from their start day
key = lambda course: course.sorting_score
courses = sorted(courses, key=key)
return courses
def get_cms_course_link(course):
"""
Returns a link to course_index for editing the course in cms,
assuming that the course is actually cms-backed.
"""
locator = loc_mapper().translate_location(
course.location.course_id, course.location, False, True
)
return "//" + settings.CMS_BASE + locator.url_reverse('course/', '')
| agpl-3.0 |
goddardl/cortex | test/IECore/JPEGImageReader.py | 12 | 7946 | ##########################################################################
#
# Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from __future__ import with_statement
import unittest
import glob
import sys
from IECore import *
class TestJPEGReader(unittest.TestCase):
def testConstruction( self ):
r = Reader.create( "test/IECore/data/jpg/uvMap.512x256.jpg" )
self.assertEqual( type(r), JPEGImageReader )
def testCanRead( self ):
self.assert_( JPEGImageReader.canRead( "test/IECore/data/jpg/uvMap.512x256.jpg" ) )
self.assert_( JPEGImageReader.canRead( "test/IECore/data/jpg/uvMap.512x256.truncated.jpg" ) )
def testIsComplete( self ):
r = Reader.create( "test/IECore/data/jpg/uvMap.512x256.jpg" )
self.assertEqual( type(r), JPEGImageReader )
self.assert_( r.isComplete() )
r = Reader.create( "test/IECore/data/jpg/uvMap.512x256.truncated.jpg" )
self.assertEqual( type(r), JPEGImageReader )
self.assert_( not r.isComplete() )
def testChannelNames( self ):
r = Reader.create( "test/IECore/data/jpg/uvMap.512x256.jpg" )
self.assertEqual( type(r), JPEGImageReader )
channelNames = r.channelNames()
self.assertEqual( len( channelNames ), 3 )
self.assert_( "R" in channelNames )
self.assert_( "G" in channelNames )
self.assert_( "B" in channelNames )
r = Reader.create( "test/IECore/data/jpg/greyscaleCheckerBoard.jpg" )
self.assertEqual( type(r), JPEGImageReader )
channelNames = r.channelNames()
self.assertEqual( len( channelNames ), 1 )
self.assert_( channelNames[0]=="Y" )
def testReadHeader( self ):
r = Reader.create( "test/IECore/data/jpg/uvMap.512x256.jpg" )
self.assertEqual( type(r), JPEGImageReader )
h = r.readHeader()
channelNames = h['channelNames']
self.assertEqual( len( channelNames ), 3 )
self.assert_( "R" in channelNames )
self.assert_( "G" in channelNames )
self.assert_( "B" in channelNames )
self.assertEqual( h['displayWindow'], Box2iData( Box2i( V2i(0,0), V2i(511,255) ) ) )
self.assertEqual( h['dataWindow'], Box2iData( Box2i( V2i(0,0), V2i(511,255) ) ) )
def testRead( self ):
r = Reader.create( "test/IECore/data/jpg/uvMap.512x256.jpg" )
self.assertEqual( type(r), JPEGImageReader )
img = r.read()
self.assertEqual( type(img), ImagePrimitive )
self.assertEqual( img.displayWindow, Box2i( V2i( 0, 0 ), V2i( 511, 255 ) ) )
self.assertEqual( img.dataWindow, Box2i( V2i( 0, 0 ), V2i( 511, 255 ) ) )
def testReadChannel( self ):
r = Reader.create( "test/IECore/data/jpg/uvMap.512x256.jpg" )
self.assertEqual( type(r), JPEGImageReader )
red = r.readChannel( "R" )
self.assert_( red )
green = r.readChannel( "G" )
self.assert_( green )
blue = r.readChannel( "B" )
self.assert_( blue )
self.assertRaises( RuntimeError, r.readChannel, "NonExistantChannel" )
self.assertEqual( len(red), len(green) )
self.assertEqual( len(red), len(blue) )
self.assertEqual( len(red), 512 * 256 )
def testDataWindowRead( self ):
r = Reader.create( "test/IECore/data/jpg/uvMap.512x256.jpg" )
r['colorSpace'] = 'linear'
self.assertEqual( type(r), JPEGImageReader )
dataWindow = Box2i(
V2i(360, 160),
V2i(399, 199)
)
r.parameters()["dataWindow"].setValue( Box2iData( dataWindow ) )
img = r.read()
self.assertEqual( type(img), ImagePrimitive )
self.assertEqual( img.dataWindow, dataWindow )
self.assertEqual( img.displayWindow, Box2i( V2i( 0, 0 ), V2i( 511, 255 ) ) )
self.assertEqual( len(img["R"].data), 40 * 40 )
ipe = PrimitiveEvaluator.create( img )
self.assert_( ipe.R() )
self.assert_( ipe.G() )
self.assert_( ipe.B() )
self.failIf ( ipe.A() )
result = ipe.createResult()
        # Check that the color at the bottom-right of the image is black - ordinarily it would
# be yellow, but we're deliberately not reading the entire image
found = ipe.pointAtPixel( V2i( 511, 255 ), result )
self.assert_( found )
color = V3f(
result.floatPrimVar( ipe.R() ),
result.floatPrimVar( ipe.G() ),
result.floatPrimVar( ipe.B() )
)
expectedColor = V3f( 0, 0, 0 )
self.assert_( ( color - expectedColor).length() < 1.e-3 )
found = ipe.pointAtPixel( V2i( 380, 180 ), result )
self.assert_( found )
color = V3f(
result.floatPrimVar( ipe.R() ),
result.floatPrimVar( ipe.G() ),
result.floatPrimVar( ipe.B() )
)
expectedColor = V3f( 0.741211, 0.706055, 0 )
self.assert_( ( color - expectedColor).length() < 1.e-3 )
def testOrientation( self ) :
""" Test orientation of JPEG files """
r = Reader.create( "test/IECore/data/jpg/uvMap.512x256.jpg" )
r['colorSpace'] = 'linear'
img = r.read()
ipe = PrimitiveEvaluator.create( img )
self.assert_( ipe.R() )
self.assert_( ipe.G() )
self.assert_( ipe.B() )
self.failIf ( ipe.A() )
result = ipe.createResult()
colorMap = {
V2i( 0 , 0 ) : V3f( 0, 0, 0 ),
V2i( 511, 0 ) : V3f( 0.996, 0, 0 ),
V2i( 0, 255 ) : V3f( 0, 1, 0.004 ),
V2i( 511, 255 ) : V3f( 1, 1, 0.004 ),
}
for point, expectedColor in colorMap.items() :
found = ipe.pointAtPixel( point, result )
self.assert_( found )
color = V3f(
result.floatPrimVar( ipe.R() ),
result.floatPrimVar( ipe.G() ),
result.floatPrimVar( ipe.B() )
)
self.assert_( ( color - expectedColor).length() < 1.e-3 )
def testErrors( self ):
r = JPEGImageReader()
self.assertRaises( RuntimeError, r.read )
r = JPEGImageReader( "test/IECore/data/tiff/uvMap.512x256.8bit.tif" )
self.assertRaises( RuntimeError, r.read )
def testAll( self ):
fileNames = glob.glob( "test/IECore/data/jpg/*.jpg" ) + glob.glob( "test/IECore/data/jpg/*.jpeg" )
expectedFailures = "test/IECore/data/jpg/uvMap.512x256.truncated.jpg"
# Silence any warnings while the tests run
with NullMessageHandler() :
for f in fileNames:
r = JPEGImageReader( f )
if f in expectedFailures :
self.assertRaises( RuntimeError, r.read )
else:
self.assert_( JPEGImageReader.canRead( f ) )
self.failIf( CINImageReader.canRead( f ) )
self.failIf( EXRImageReader.canRead( f ) )
self.failIf( TIFFImageReader.canRead( f ) )
img = r.read()
self.assertEqual( type(img), ImagePrimitive )
self.assert_( img.arePrimitiveVariablesValid() )
if __name__ == "__main__":
unittest.main()
| bsd-3-clause |
adhish20/TwitterWithCassandra | twiss/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/chardetect.py | 1786 | 2504 | #!/usr/bin/env python
"""
Script which takes one or more file paths and reports on their detected
encodings
Example::
% chardetect somefile someotherfile
somefile: windows-1252 with confidence 0.5
someotherfile: ascii with confidence 1.0
If no paths are provided, it takes its input from stdin.
"""
from __future__ import absolute_import, print_function, unicode_literals
import argparse
import sys
from io import open
from chardet import __version__
from chardet.universaldetector import UniversalDetector
def description_of(lines, name='stdin'):
"""
Return a string describing the probable encoding of a file or
list of strings.
:param lines: The lines to get the encoding of.
:type lines: Iterable of bytes
:param name: Name of file or collection of lines
:type name: str
"""
u = UniversalDetector()
for line in lines:
u.feed(line)
u.close()
result = u.result
if result['encoding']:
return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
result['confidence'])
else:
return '{0}: no result'.format(name)
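# Illustrative sketch (not part of the original script): calling
# description_of() directly on a file opened in binary mode; the file name
# and the sample output are hypothetical.
def _example_describe():
    with open('somefile.txt', 'rb') as f:
        return description_of(f, f.name)   # e.g. "somefile.txt: utf-8 with confidence 0.99"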
def main(argv=None):
'''
Handles command line arguments and gets things started.
:param argv: List of arguments, as if specified on the command-line.
If None, ``sys.argv[1:]`` is used instead.
:type argv: list of str
'''
# Get command line arguments
parser = argparse.ArgumentParser(
description="Takes one or more file paths and reports their detected \
encodings",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
conflict_handler='resolve')
parser.add_argument('input',
help='File whose encoding we would like to determine.',
type=argparse.FileType('rb'), nargs='*',
default=[sys.stdin])
parser.add_argument('--version', action='version',
version='%(prog)s {0}'.format(__version__))
args = parser.parse_args(argv)
for f in args.input:
if f.isatty():
print("You are running chardetect interactively. Press " +
"CTRL-D twice at the start of a blank line to signal the " +
"end of your input. If you want help, run chardetect " +
"--help\n", file=sys.stderr)
print(description_of(f, f.name))
if __name__ == '__main__':
main()
| mit |
wenhuizhang/dist-sys-exercises | lec-10/hadoop/reduce.py | 17 | 1027 | #!/usr/bin/env python
from operator import itemgetter
import sys
current_word = None
current_count = 0
word = None
# input comes from STDIN
for line in sys.stdin:
# remove leading and trailing whitespace
line = line.strip()
# parse the input we got from mapper.py
word, count = line.split('\t', 1)
# convert count (currently a string) to int
try:
count = int(count)
except ValueError:
# count was not a number, so silently
# ignore/discard this line
continue
# this IF-switch only works because Hadoop sorts map output
# by key (here: word) before it is passed to the reducer
if current_word == word:
current_count += count
else:
if current_word:
# write result to STDOUT
print '%s\t%s' % (current_word, current_count)
current_count = count
current_word = word
# do not forget to output the last word if needed!
if current_word == word:
print '%s\t%s' % (current_word, current_count)
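# Illustrative only (not in the original script): the reducer depends on its
# input arriving sorted by key, which Hadoop Streaming guarantees between the
# map and reduce phases.  A local dry run can emulate that with sort, e.g.
# (file names are hypothetical):
#   cat input.txt | ./mapper.py | sort -k1,1 | ./reduce.py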
| mit |
a97001/comp2222prototype | node_modules/npm/node_modules/node-gyp/gyp/PRESUBMIT.py | 496 | 3373 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for GYP.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
PYLINT_BLACKLIST = [
# TODO: fix me.
# From SCons, not done in google style.
'test/lib/TestCmd.py',
'test/lib/TestCommon.py',
'test/lib/TestGyp.py',
# Needs style fix.
'pylib/gyp/generator/xcode.py',
]
PYLINT_DISABLED_WARNINGS = [
# TODO: fix me.
# Many tests include modules they don't use.
'W0611',
# Include order doesn't properly include local files?
'F0401',
# Some use of built-in names.
'W0622',
# Some unused variables.
'W0612',
# Operator not preceded/followed by space.
'C0323',
'C0322',
# Unnecessary semicolon.
'W0301',
# Unused argument.
'W0613',
# String has no effect (docstring in wrong place).
'W0105',
# Comma not followed by space.
'C0324',
# Access to a protected member.
'W0212',
# Bad indent.
'W0311',
# Line too long.
'C0301',
# Undefined variable.
'E0602',
# Not exception type specified.
'W0702',
# No member of that name.
'E1101',
# Dangerous default {}.
'W0102',
# Others, too many to sort.
'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
'R0201', 'E0101', 'C0321',
# ************* Module copy
# W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect
'W0104',
]
def CheckChangeOnUpload(input_api, output_api):
report = []
report.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api))
return report
def CheckChangeOnCommit(input_api, output_api):
report = []
# Accept any year number from 2009 to the current year.
current_year = int(input_api.time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')'
# The (c) is deprecated, but tolerate it until it's removed from all files.
license = (
r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
r'.*? Use of this source code is governed by a BSD-style license that '
r'can be\n'
r'.*? found in the LICENSE file\.\n'
) % {
'year': years_re,
}
report.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, license_header=license))
report.extend(input_api.canned_checks.CheckTreeIsOpen(
input_api, output_api,
'http://gyp-status.appspot.com/status',
'http://gyp-status.appspot.com/current'))
import os
import sys
old_sys_path = sys.path
try:
sys.path = ['pylib', 'test/lib'] + sys.path
blacklist = PYLINT_BLACKLIST
if sys.platform == 'win32':
blacklist = [os.path.normpath(x).replace('\\', '\\\\')
for x in PYLINT_BLACKLIST]
report.extend(input_api.canned_checks.RunPylint(
input_api,
output_api,
black_list=blacklist,
disabled_warnings=PYLINT_DISABLED_WARNINGS))
finally:
sys.path = old_sys_path
return report
def GetPreferredTrySlaves():
return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac', 'gyp-android']
| gpl-3.0 |
dbcli/vcli | tests/features/db_utils.py | 1 | 1512 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import print_function
from vertica_python import connect
def create_schema(hostname='localhost', username=None, password=None,
dbname=None, port=None, schema_name=None):
"""Create test schema."""
cn = create_cn(hostname, password, username, dbname, port)
with cn.cursor() as cr:
cr.execute('DROP SCHEMA IF EXISTS %s CASCADE' % dbname)
cr.execute('CREATE SCHEMA %s' % dbname)
cn.close()
cn = create_cn(hostname, password, username, dbname, port)
return cn
def create_cn(hostname, password, username, dbname, port):
"""
Open connection to database.
    :param hostname: string
    :param password: string
    :param username: string
    :param dbname: string
    :param port: int
    :return: vertica_python.Connection
"""
cn = connect(host=hostname, user=username, database=dbname,
password=password, port=port)
print('Created connection: {0}.'.format(hostname))
return cn
def drop_schema(hostname='localhost', username=None, password=None,
dbname=None, port=None):
cn = create_cn(hostname, password, username, dbname, port)
with cn.cursor() as cr:
cr.execute('DROP SCHEMA IF EXISTS %s CASCADE' % dbname)
close_cn(cn)
def close_cn(cn=None):
"""
Close connection.
    :param cn: vertica_python.Connection
"""
if cn:
cn.close()
print('Closed connection: {0}.'.format(cn.options['host']))
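# Illustrative sketch (not in the original helpers): a typical test lifecycle;
# the credentials and database name here are hypothetical.
def _example_lifecycle():
    cn = create_schema(hostname='localhost', username='dbadmin',
                       password='secret', dbname='vcli_test', port=5433)
    try:
        with cn.cursor() as cr:
            cr.execute('SELECT 1')
    finally:
        close_cn(cn)
        drop_schema(hostname='localhost', username='dbadmin',
                    password='secret', dbname='vcli_test', port=5433)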
| bsd-3-clause |
droidlabour/git_intgrtn_aws_s3 | CreateSSHKey/Crypto/SelfTest/PublicKey/test_DSA.py | 118 | 9861 | # -*- coding: utf-8 -*-
#
# SelfTest/PublicKey/test_DSA.py: Self-test for the DSA primitive
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Crypto.PublicKey.DSA"""
__revision__ = "$Id$"
import sys
import os
if sys.version_info[0] == 2 and sys.version_info[1] == 1:
from Crypto.Util.py21compat import *
from Crypto.Util.py3compat import *
import unittest
from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex
def _sws(s):
"""Remove whitespace from a text or byte string"""
if isinstance(s,str):
return "".join(s.split())
else:
return b("").join(s.split())
class DSATest(unittest.TestCase):
# Test vector from "Appendix 5. Example of the DSA" of
# "Digital Signature Standard (DSS)",
# U.S. Department of Commerce/National Institute of Standards and Technology
# FIPS 186-2 (+Change Notice), 2000 January 27.
# http://csrc.nist.gov/publications/fips/fips186-2/fips186-2-change1.pdf
y = _sws("""19131871 d75b1612 a819f29d 78d1b0d7 346f7aa7 7bb62a85
9bfd6c56 75da9d21 2d3a36ef 1672ef66 0b8c7c25 5cc0ec74
858fba33 f44c0669 9630a76b 030ee333""")
g = _sws("""626d0278 39ea0a13 413163a5 5b4cb500 299d5522 956cefcb
3bff10f3 99ce2c2e 71cb9de5 fa24babf 58e5b795 21925c9c
c42e9f6f 464b088c c572af53 e6d78802""")
p = _sws("""8df2a494 492276aa 3d25759b b06869cb eac0d83a fb8d0cf7
cbb8324f 0d7882e5 d0762fc5 b7210eaf c2e9adac 32ab7aac
49693dfb f83724c2 ec0736ee 31c80291""")
q = _sws("""c773218c 737ec8ee 993b4f2d ed30f48e dace915f""")
x = _sws("""2070b322 3dba372f de1c0ffc 7b2e3b49 8b260614""")
k = _sws("""358dad57 1462710f 50e254cf 1a376b2b deaadfbf""")
k_inverse = _sws("""0d516729 8202e49b 4116ac10 4fc3f415 ae52f917""")
m = b2a_hex(b("abc"))
m_hash = _sws("""a9993e36 4706816a ba3e2571 7850c26c 9cd0d89d""")
r = _sws("""8bac1ab6 6410435c b7181f95 b16ab97c 92b341c0""")
s = _sws("""41e2345f 1f56df24 58f426d1 55b4ba2d b6dcd8c8""")
def setUp(self):
global DSA, Random, bytes_to_long, size
from Crypto.PublicKey import DSA
from Crypto import Random
from Crypto.Util.number import bytes_to_long, inverse, size
self.dsa = DSA
def test_generate_1arg(self):
"""DSA (default implementation) generated key (1 argument)"""
dsaObj = self.dsa.generate(1024)
self._check_private_key(dsaObj)
pub = dsaObj.publickey()
self._check_public_key(pub)
def test_generate_2arg(self):
"""DSA (default implementation) generated key (2 arguments)"""
dsaObj = self.dsa.generate(1024, Random.new().read)
self._check_private_key(dsaObj)
pub = dsaObj.publickey()
self._check_public_key(pub)
def test_construct_4tuple(self):
"""DSA (default implementation) constructed key (4-tuple)"""
(y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)]
dsaObj = self.dsa.construct((y, g, p, q))
self._test_verification(dsaObj)
def test_construct_5tuple(self):
"""DSA (default implementation) constructed key (5-tuple)"""
(y, g, p, q, x) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q, self.x)]
dsaObj = self.dsa.construct((y, g, p, q, x))
self._test_signing(dsaObj)
self._test_verification(dsaObj)
def _check_private_key(self, dsaObj):
# Check capabilities
self.assertEqual(1, dsaObj.has_private())
self.assertEqual(1, dsaObj.can_sign())
self.assertEqual(0, dsaObj.can_encrypt())
self.assertEqual(0, dsaObj.can_blind())
# Check dsaObj.[ygpqx] -> dsaObj.key.[ygpqx] mapping
self.assertEqual(dsaObj.y, dsaObj.key.y)
self.assertEqual(dsaObj.g, dsaObj.key.g)
self.assertEqual(dsaObj.p, dsaObj.key.p)
self.assertEqual(dsaObj.q, dsaObj.key.q)
self.assertEqual(dsaObj.x, dsaObj.key.x)
# Sanity check key data
self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q
self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits
self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1
self.assertEqual(dsaObj.y, pow(dsaObj.g, dsaObj.x, dsaObj.p)) # y == g**x mod p
self.assertEqual(1, 0 < dsaObj.x < dsaObj.q) # 0 < x < q
def _check_public_key(self, dsaObj):
k = a2b_hex(self.k)
m_hash = a2b_hex(self.m_hash)
# Check capabilities
self.assertEqual(0, dsaObj.has_private())
self.assertEqual(1, dsaObj.can_sign())
self.assertEqual(0, dsaObj.can_encrypt())
self.assertEqual(0, dsaObj.can_blind())
# Check dsaObj.[ygpq] -> dsaObj.key.[ygpq] mapping
self.assertEqual(dsaObj.y, dsaObj.key.y)
self.assertEqual(dsaObj.g, dsaObj.key.g)
self.assertEqual(dsaObj.p, dsaObj.key.p)
self.assertEqual(dsaObj.q, dsaObj.key.q)
# Check that private parameters are all missing
self.assertEqual(0, hasattr(dsaObj, 'x'))
self.assertEqual(0, hasattr(dsaObj.key, 'x'))
# Sanity check key data
self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q
self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits
self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1
# Public-only key objects should raise an error when .sign() is called
self.assertRaises(TypeError, dsaObj.sign, m_hash, k)
# Check __eq__ and __ne__
self.assertEqual(dsaObj.publickey() == dsaObj.publickey(),True) # assert_
self.assertEqual(dsaObj.publickey() != dsaObj.publickey(),False) # failIf
def _test_signing(self, dsaObj):
k = a2b_hex(self.k)
m_hash = a2b_hex(self.m_hash)
r = bytes_to_long(a2b_hex(self.r))
s = bytes_to_long(a2b_hex(self.s))
(r_out, s_out) = dsaObj.sign(m_hash, k)
self.assertEqual((r, s), (r_out, s_out))
def _test_verification(self, dsaObj):
m_hash = a2b_hex(self.m_hash)
r = bytes_to_long(a2b_hex(self.r))
s = bytes_to_long(a2b_hex(self.s))
self.assertEqual(1, dsaObj.verify(m_hash, (r, s)))
self.assertEqual(0, dsaObj.verify(m_hash + b("\0"), (r, s)))
class DSAFastMathTest(DSATest):
def setUp(self):
DSATest.setUp(self)
self.dsa = DSA.DSAImplementation(use_fast_math=True)
def test_generate_1arg(self):
"""DSA (_fastmath implementation) generated key (1 argument)"""
DSATest.test_generate_1arg(self)
def test_generate_2arg(self):
"""DSA (_fastmath implementation) generated key (2 arguments)"""
DSATest.test_generate_2arg(self)
def test_construct_4tuple(self):
"""DSA (_fastmath implementation) constructed key (4-tuple)"""
DSATest.test_construct_4tuple(self)
def test_construct_5tuple(self):
"""DSA (_fastmath implementation) constructed key (5-tuple)"""
DSATest.test_construct_5tuple(self)
class DSASlowMathTest(DSATest):
def setUp(self):
DSATest.setUp(self)
self.dsa = DSA.DSAImplementation(use_fast_math=False)
def test_generate_1arg(self):
"""DSA (_slowmath implementation) generated key (1 argument)"""
DSATest.test_generate_1arg(self)
def test_generate_2arg(self):
"""DSA (_slowmath implementation) generated key (2 arguments)"""
DSATest.test_generate_2arg(self)
def test_construct_4tuple(self):
"""DSA (_slowmath implementation) constructed key (4-tuple)"""
DSATest.test_construct_4tuple(self)
def test_construct_5tuple(self):
"""DSA (_slowmath implementation) constructed key (5-tuple)"""
DSATest.test_construct_5tuple(self)
def get_tests(config={}):
tests = []
tests += list_test_cases(DSATest)
try:
from Crypto.PublicKey import _fastmath
tests += list_test_cases(DSAFastMathTest)
except ImportError:
from distutils.sysconfig import get_config_var
import inspect
_fm_path = os.path.normpath(os.path.dirname(os.path.abspath(
inspect.getfile(inspect.currentframe())))
+"/../../PublicKey/_fastmath"+get_config_var("SO"))
if os.path.exists(_fm_path):
raise ImportError("While the _fastmath module exists, importing "+
"it failed. This may point to the gmp or mpir shared library "+
"not being in the path. _fastmath was found at "+_fm_path)
tests += list_test_cases(DSASlowMathTest)
return tests
if __name__ == '__main__':
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| mit |
jmanday/Master | TFM/library/boost_1_63_0/tools/build/test/core_actions_quietly.py | 51 | 1038 | #!/usr/bin/python
# Copyright 2007 Rene Rivera.
# Copyright 2011 Steven Watanabe
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
t = BoostBuild.Tester(pass_toolset=0)
t.write("file.jam", """\
actions quietly .a.
{
echo [$(<:B)] 0
echo [$(<:B)] 1
echo [$(<:B)] 2
}
rule .a.
{
DEPENDS $(<) : $(>) ;
}
NOTFILE subtest ;
.a. subtest_a : subtest ;
.a. subtest_b : subtest ;
DEPENDS all : subtest_a subtest_b ;
""")
t.run_build_system(["-ffile.jam", "-d2"], stdout="""\
...found 4 targets...
...updating 2 targets...
.a. subtest_a
echo [subtest_a] 0
echo [subtest_a] 1
echo [subtest_a] 2
[subtest_a] 0
[subtest_a] 1
[subtest_a] 2
.a. subtest_b
echo [subtest_b] 0
echo [subtest_b] 1
echo [subtest_b] 2
[subtest_b] 0
[subtest_b] 1
[subtest_b] 2
...updated 2 targets...
""")
t.run_build_system(["-ffile.jam", "-d1"], stdout="""\
...found 4 targets...
...updating 2 targets...
...updated 2 targets...
""")
t.cleanup()
| apache-2.0 |
ForgottenKahz/CloudOPC | venv/Lib/site-packages/pip/_vendor/six.py | 878 | 29664 | """Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.9.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python 3
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
        Return true if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
        Required if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
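# Illustrative usage (not part of six itself): once this module is importable
# as ``six``, the renamed objects registered above resolve lazily to the
# right implementation on either major version, e.g.
#
#     >>> from six.moves import range
#     >>> list(range(3))
#     [0, 1, 2]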
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return iter(d.iterkeys(**kw))
def itervalues(d, **kw):
return iter(d.itervalues(**kw))
def iteritems(d, **kw):
return iter(d.iteritems(**kw))
def iterlists(d, **kw):
return iter(d.iterlists(**kw))
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
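# Illustrative usage: b() always returns the binary type and u() the text
# type of the running interpreter:
#
#     >>> isinstance(b("data"), binary_type)
#     True
#     >>> isinstance(u("data"), text_type)
#     True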
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
_add_doc(reraise, """Reraise an exception.""")
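# Illustrative usage: re-raise the active exception with its original
# traceback in a version-neutral way:
#
#     import sys
#     try:
#         1 / 0
#     except ZeroDivisionError:
#         reraise(*sys.exc_info())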
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
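# Illustrative usage: the temporary class above is replaced at creation time,
# so ``Base`` ends up a direct instance of the real metaclass:
#
#     class Meta(type):
#         pass
#
#     class Base(with_metaclass(Meta, object)):
#         pass
#
#     assert type(Base) is Meta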
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
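# Illustrative usage, equivalent to the with_metaclass() example above but
# spelled as a decorator:
#
#     @add_metaclass(Meta)
#     class Base(object):
#         pass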
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
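# Illustrative usage: define __str__ once, returning text, and let the
# decorator derive the Python 2 __unicode__/__str__ pair:
#
#     @python_2_unicode_compatible
#     class Greeting(object):
#         def __str__(self):
#             return u'hello'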
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| mit |
wkschwartz/django | tests/generic_views/test_base.py | 10 | 22004 | import time
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import require_jinja2
from django.urls import resolve
from django.views.generic import RedirectView, TemplateView, View
from . import views
class SimpleView(View):
"""
A simple view with a docstring.
"""
def get(self, request):
return HttpResponse('This is a simple view')
class SimplePostView(SimpleView):
post = SimpleView.get
class PostOnlyView(View):
def post(self, request):
return HttpResponse('This view only accepts POST')
class CustomizableView(SimpleView):
parameter = {}
def decorator(view):
view.is_decorated = True
return view
class DecoratedDispatchView(SimpleView):
@decorator
def dispatch(self, request, *args, **kwargs):
return super().dispatch(request, *args, **kwargs)
class AboutTemplateView(TemplateView):
def get(self, request):
return self.render_to_response({})
def get_template_names(self):
return ['generic_views/about.html']
class AboutTemplateAttributeView(TemplateView):
template_name = 'generic_views/about.html'
def get(self, request):
return self.render_to_response(context={})
class InstanceView(View):
def get(self, request):
return self
class ViewTest(SimpleTestCase):
rf = RequestFactory()
def _assert_simple(self, response):
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'This is a simple view')
def test_no_init_kwargs(self):
"""
A view can't be accidentally instantiated before deployment
"""
msg = 'This method is available only on the class, not on instances.'
with self.assertRaisesMessage(AttributeError, msg):
SimpleView(key='value').as_view()
def test_no_init_args(self):
"""
A view can't be accidentally instantiated before deployment
"""
msg = 'as_view() takes 1 positional argument but 2 were given'
with self.assertRaisesMessage(TypeError, msg):
SimpleView.as_view('value')
def test_pathological_http_method(self):
"""
        The edge case of an HTTP request that spoofs an existing method name is caught.
"""
self.assertEqual(SimpleView.as_view()(
self.rf.get('/', REQUEST_METHOD='DISPATCH')
).status_code, 405)
def test_get_only(self):
"""
        Test that a view which only allows GET doesn't allow other methods.
"""
self._assert_simple(SimpleView.as_view()(self.rf.get('/')))
self.assertEqual(SimpleView.as_view()(self.rf.post('/')).status_code, 405)
self.assertEqual(SimpleView.as_view()(
self.rf.get('/', REQUEST_METHOD='FAKE')
).status_code, 405)
def test_get_and_head(self):
"""
        Test that a view which supplies a GET method also responds correctly to HEAD.
"""
self._assert_simple(SimpleView.as_view()(self.rf.get('/')))
response = SimpleView.as_view()(self.rf.head('/'))
self.assertEqual(response.status_code, 200)
def test_setup_get_and_head(self):
view_instance = SimpleView()
self.assertFalse(hasattr(view_instance, 'head'))
view_instance.setup(self.rf.get('/'))
self.assertTrue(hasattr(view_instance, 'head'))
self.assertEqual(view_instance.head, view_instance.get)
def test_head_no_get(self):
"""
        Test that a view which supplies no GET method responds to HEAD with HTTP 405.
"""
response = PostOnlyView.as_view()(self.rf.head('/'))
self.assertEqual(response.status_code, 405)
def test_get_and_post(self):
"""
        Test a view which allows both GET and POST.
"""
self._assert_simple(SimplePostView.as_view()(self.rf.get('/')))
self._assert_simple(SimplePostView.as_view()(self.rf.post('/')))
self.assertEqual(SimplePostView.as_view()(
self.rf.get('/', REQUEST_METHOD='FAKE')
).status_code, 405)
def test_invalid_keyword_argument(self):
"""
View arguments must be predefined on the class and can't
        be named like an HTTP method.
"""
msg = (
'The method name %s is not accepted as a keyword argument to '
'SimpleView().'
)
# Check each of the allowed method names
for method in SimpleView.http_method_names:
with self.assertRaisesMessage(TypeError, msg % method):
SimpleView.as_view(**{method: 'value'})
# Check the case view argument is ok if predefined on the class...
CustomizableView.as_view(parameter="value")
# ...but raises errors otherwise.
msg = (
"CustomizableView() received an invalid keyword 'foobar'. "
"as_view only accepts arguments that are already attributes of "
"the class."
)
with self.assertRaisesMessage(TypeError, msg):
CustomizableView.as_view(foobar="value")
def test_calling_more_than_once(self):
"""
        Test that a view can only be called once.
"""
request = self.rf.get('/')
view = InstanceView.as_view()
self.assertNotEqual(view(request), view(request))
def test_class_attributes(self):
"""
The callable returned from as_view() has proper
docstring, name and module.
"""
self.assertEqual(SimpleView.__doc__, SimpleView.as_view().__doc__)
self.assertEqual(SimpleView.__name__, SimpleView.as_view().__name__)
self.assertEqual(SimpleView.__module__, SimpleView.as_view().__module__)
def test_dispatch_decoration(self):
"""
Attributes set by decorators on the dispatch method
are also present on the closure.
"""
self.assertTrue(DecoratedDispatchView.as_view().is_decorated)
def test_options(self):
"""
Views respond to HTTP OPTIONS requests with an Allow header
appropriate for the methods implemented by the view class.
"""
request = self.rf.options('/')
view = SimpleView.as_view()
response = view(request)
self.assertEqual(200, response.status_code)
self.assertTrue(response.headers['Allow'])
def test_options_for_get_view(self):
"""
A view implementing GET allows GET and HEAD.
"""
request = self.rf.options('/')
view = SimpleView.as_view()
response = view(request)
self._assert_allows(response, 'GET', 'HEAD')
def test_options_for_get_and_post_view(self):
"""
A view implementing GET and POST allows GET, HEAD, and POST.
"""
request = self.rf.options('/')
view = SimplePostView.as_view()
response = view(request)
self._assert_allows(response, 'GET', 'HEAD', 'POST')
def test_options_for_post_view(self):
"""
A view implementing POST allows POST.
"""
request = self.rf.options('/')
view = PostOnlyView.as_view()
response = view(request)
self._assert_allows(response, 'POST')
def _assert_allows(self, response, *expected_methods):
"Assert allowed HTTP methods reported in the Allow response header"
response_allows = set(response.headers['Allow'].split(', '))
self.assertEqual(set(expected_methods + ('OPTIONS',)), response_allows)
def test_args_kwargs_request_on_self(self):
"""
        Test that a view only has args, kwargs & request once `as_view`
        has been called.
"""
bare_view = InstanceView()
view = InstanceView.as_view()(self.rf.get('/'))
for attribute in ('args', 'kwargs', 'request'):
self.assertNotIn(attribute, dir(bare_view))
self.assertIn(attribute, dir(view))
def test_overridden_setup(self):
class SetAttributeMixin:
def setup(self, request, *args, **kwargs):
self.attr = True
super().setup(request, *args, **kwargs)
class CheckSetupView(SetAttributeMixin, SimpleView):
def dispatch(self, request, *args, **kwargs):
assert hasattr(self, 'attr')
return super().dispatch(request, *args, **kwargs)
response = CheckSetupView.as_view()(self.rf.get('/'))
self.assertEqual(response.status_code, 200)
def test_not_calling_parent_setup_error(self):
class TestView(View):
def setup(self, request, *args, **kwargs):
pass # Not calling super().setup()
msg = (
"TestView instance has no 'request' attribute. Did you override "
"setup() and forget to call super()?"
)
with self.assertRaisesMessage(AttributeError, msg):
TestView.as_view()(self.rf.get('/'))
def test_setup_adds_args_kwargs_request(self):
request = self.rf.get('/')
args = ('arg 1', 'arg 2')
kwargs = {'kwarg_1': 1, 'kwarg_2': 'year'}
view = View()
view.setup(request, *args, **kwargs)
self.assertEqual(request, view.request)
self.assertEqual(args, view.args)
self.assertEqual(kwargs, view.kwargs)
def test_direct_instantiation(self):
"""
It should be possible to use the view by directly instantiating it
without going through .as_view() (#21564).
"""
view = PostOnlyView()
response = view.dispatch(self.rf.head('/'))
self.assertEqual(response.status_code, 405)
@override_settings(ROOT_URLCONF='generic_views.urls')
class TemplateViewTest(SimpleTestCase):
rf = RequestFactory()
def _assert_about(self, response):
response.render()
self.assertContains(response, '<h1>About</h1>')
def test_get(self):
"""
Test a view that simply renders a template on GET
"""
self._assert_about(AboutTemplateView.as_view()(self.rf.get('/about/')))
def test_head(self):
"""
Test a TemplateView responds correctly to HEAD
"""
response = AboutTemplateView.as_view()(self.rf.head('/about/'))
self.assertEqual(response.status_code, 200)
def test_get_template_attribute(self):
"""
Test a view that renders a template on GET with the template name as
an attribute on the class.
"""
self._assert_about(AboutTemplateAttributeView.as_view()(self.rf.get('/about/')))
def test_get_generic_template(self):
"""
Test a completely generic view that renders a template on GET
with the template name as an argument at instantiation.
"""
self._assert_about(TemplateView.as_view(template_name='generic_views/about.html')(self.rf.get('/about/')))
def test_template_name_required(self):
"""
A template view must provide a template name.
"""
msg = (
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of 'get_template_names()'"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get('/template/no_template/')
@require_jinja2
def test_template_engine(self):
"""
A template view may provide a template engine.
"""
request = self.rf.get('/using/')
view = TemplateView.as_view(template_name='generic_views/using.html')
self.assertEqual(view(request).render().content, b'DTL\n')
view = TemplateView.as_view(template_name='generic_views/using.html', template_engine='django')
self.assertEqual(view(request).render().content, b'DTL\n')
view = TemplateView.as_view(template_name='generic_views/using.html', template_engine='jinja2')
self.assertEqual(view(request).render().content, b'Jinja2\n')
def test_template_params(self):
"""
A generic template view passes kwargs as context.
"""
response = self.client.get('/template/simple/bar/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['foo'], 'bar')
self.assertIsInstance(response.context['view'], View)
def test_extra_template_params(self):
"""
A template view can be customized to return extra context.
"""
response = self.client.get('/template/custom/bar/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['foo'], 'bar')
self.assertEqual(response.context['key'], 'value')
self.assertIsInstance(response.context['view'], View)
def test_cached_views(self):
"""
A template view can be cached
"""
response = self.client.get('/template/cached/bar/')
self.assertEqual(response.status_code, 200)
time.sleep(1.0)
response2 = self.client.get('/template/cached/bar/')
self.assertEqual(response2.status_code, 200)
self.assertEqual(response.content, response2.content)
time.sleep(2.0)
# Let the cache expire and test again
response2 = self.client.get('/template/cached/bar/')
self.assertEqual(response2.status_code, 200)
self.assertNotEqual(response.content, response2.content)
def test_content_type(self):
response = self.client.get('/template/content_type/')
self.assertEqual(response.headers['Content-Type'], 'text/plain')
def test_resolve_view(self):
match = resolve('/template/content_type/')
self.assertIs(match.func.view_class, TemplateView)
self.assertEqual(match.func.view_initkwargs['content_type'], 'text/plain')
def test_resolve_login_required_view(self):
match = resolve('/template/login_required/')
self.assertIs(match.func.view_class, TemplateView)
def test_extra_context(self):
response = self.client.get('/template/extra_context/')
self.assertEqual(response.context['title'], 'Title')
@override_settings(ROOT_URLCONF='generic_views.urls')
class RedirectViewTest(SimpleTestCase):
rf = RequestFactory()
def test_no_url(self):
"Without any configuration, returns HTTP 410 GONE"
response = RedirectView.as_view()(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 410)
def test_default_redirect(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_permanent_redirect(self):
"Permanent redirects are an option"
response = RedirectView.as_view(url='/bar/', permanent=True)(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 301)
self.assertEqual(response.url, '/bar/')
def test_temporary_redirect(self):
"Temporary redirects are an option"
response = RedirectView.as_view(url='/bar/', permanent=False)(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_include_args(self):
"GET arguments can be included in the redirected URL"
response = RedirectView.as_view(url='/bar/')(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
response = RedirectView.as_view(url='/bar/', query_string=True)(self.rf.get('/foo/?pork=spam'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/?pork=spam')
def test_include_urlencoded_args(self):
"GET arguments can be URL-encoded when included in the redirected URL"
response = RedirectView.as_view(url='/bar/', query_string=True)(
self.rf.get('/foo/?unicode=%E2%9C%93'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/?unicode=%E2%9C%93')
def test_parameter_substitution(self):
"Redirection URLs can be parameterized"
response = RedirectView.as_view(url='/bar/%(object_id)d/')(self.rf.get('/foo/42/'), object_id=42)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/42/')
def test_named_url_pattern(self):
"Named pattern parameter should reverse to the matching pattern"
response = RedirectView.as_view(pattern_name='artist_detail')(self.rf.get('/foo/'), pk=1)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.headers['Location'], '/detail/artist/1/')
def test_named_url_pattern_using_args(self):
response = RedirectView.as_view(pattern_name='artist_detail')(self.rf.get('/foo/'), 1)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.headers['Location'], '/detail/artist/1/')
def test_redirect_POST(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.post('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_HEAD(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.head('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_OPTIONS(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.options('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_PUT(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.put('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_PATCH(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.patch('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_DELETE(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.delete('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_when_meta_contains_no_query_string(self):
"regression for #16705"
# we can't use self.rf.get because it always sets QUERY_STRING
response = RedirectView.as_view(url='/bar/')(self.rf.request(PATH_INFO='/foo/'))
self.assertEqual(response.status_code, 302)
def test_direct_instantiation(self):
"""
It should be possible to use the view without going through .as_view()
(#21564).
"""
view = RedirectView()
response = view.dispatch(self.rf.head('/foo/'))
self.assertEqual(response.status_code, 410)
class GetContextDataTest(SimpleTestCase):
def test_get_context_data_super(self):
test_view = views.CustomContextView()
context = test_view.get_context_data(kwarg_test='kwarg_value')
# the test_name key is inserted by the test classes parent
self.assertIn('test_name', context)
self.assertEqual(context['kwarg_test'], 'kwarg_value')
self.assertEqual(context['custom_key'], 'custom_value')
# test that kwarg overrides values assigned higher up
context = test_view.get_context_data(test_name='test_value')
self.assertEqual(context['test_name'], 'test_value')
def test_object_at_custom_name_in_context_data(self):
        # Checks 'pony' key presence in dict returned by get_context_data
test_view = views.CustomSingleObjectView()
test_view.context_object_name = 'pony'
context = test_view.get_context_data()
self.assertEqual(context['pony'], test_view.object)
def test_object_in_get_context_data(self):
        # Checks 'object' key presence in dict returned by get_context_data #20234
test_view = views.CustomSingleObjectView()
context = test_view.get_context_data()
self.assertEqual(context['object'], test_view.object)
class UseMultipleObjectMixinTest(SimpleTestCase):
rf = RequestFactory()
def test_use_queryset_from_view(self):
test_view = views.CustomMultipleObjectMixinView()
test_view.get(self.rf.get('/'))
# Don't pass queryset as argument
context = test_view.get_context_data()
self.assertEqual(context['object_list'], test_view.queryset)
def test_overwrite_queryset(self):
test_view = views.CustomMultipleObjectMixinView()
test_view.get(self.rf.get('/'))
queryset = [{'name': 'Lennon'}, {'name': 'Ono'}]
self.assertNotEqual(test_view.queryset, queryset)
# Overwrite the view's queryset with queryset from kwarg
context = test_view.get_context_data(object_list=queryset)
self.assertEqual(context['object_list'], queryset)
class SingleObjectTemplateResponseMixinTest(SimpleTestCase):
def test_template_mixin_without_template(self):
"""
        We want to make sure that if you use a template mixin, but forget the
template, it still tells you it's ImproperlyConfigured instead of
TemplateDoesNotExist.
"""
view = views.TemplateResponseWithoutTemplate()
msg = (
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of 'get_template_names()'"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
view.get_template_names()
| bsd-3-clause |
deginner/bitjws | tests/test_multisig.py | 1 | 1987 | import json
import pytest
import bitjws
def test_encode_decode():
key1 = bitjws.PrivateKey()
pubkey1 = bitjws.pubkey_to_addr(key1.pubkey.serialize())
key2 = bitjws.PrivateKey()
pubkey2 = bitjws.pubkey_to_addr(key2.pubkey.serialize())
ser = bitjws.multisig_sign_serialize([key1, key2])
headers, payload = bitjws.multisig_validate_deserialize(ser)
rawpayload = json.loads(ser)['payload']
origpayload = bitjws.base64url_decode(rawpayload.encode('utf8'))
keys_found = {pubkey1: False, pubkey2: False}
assert len(headers) == 2
for h in headers:
assert len(h) == 3
assert h['typ'] == 'JWT'
assert h['alg'] == 'CUSTOM-BITCOIN-SIGN'
assert h['kid'] in keys_found
assert keys_found[h['kid']] == False
keys_found[h['kid']] = True
assert all(keys_found.values())
assert isinstance(payload.get('exp', ''), (float, int))
assert payload['aud'] is None
assert len(payload) == 2
assert payload == json.loads(origpayload.decode('utf8'))
def test_payload_nojson():
key = bitjws.PrivateKey()
# Use a payload that is not JSON encoded.
ser = json.loads(bitjws.multisig_sign_serialize([key]))
ser['payload'] = bitjws.base64url_encode(b'test').decode('utf8')
# Sign the new payload.
signdata = '{}.{}'.format(ser['signatures'][0]['protected'], ser['payload'])
sig = bitjws.ALGORITHM_AVAILABLE['CUSTOM-BITCOIN-SIGN'].sign(
key, signdata)
sig64 = bitjws.base64url_encode(sig).decode('utf8')
ser['signatures'][0]['signature'] = sig64
serenc = json.dumps(ser)
with pytest.raises(bitjws.InvalidMessage):
# The new payload was not JSON encoded, so it cannot be
# decoded as that.
bitjws.multisig_validate_deserialize(serenc)
# But we can get its raw value.
headers, payload = bitjws.multisig_validate_deserialize(serenc,
decode_payload=False)
assert len(headers) == 1
assert payload == b'test'
| mit |
pschmitt/home-assistant | homeassistant/components/rainmachine/sensor.py | 6 | 5800 | """This platform provides support for sensor data from RainMachine."""
import logging
from homeassistant.const import TEMP_CELSIUS
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import RainMachineEntity
from .const import (
DATA_CLIENT,
DATA_PROVISION_SETTINGS,
DATA_RESTRICTIONS_UNIVERSAL,
DOMAIN as RAINMACHINE_DOMAIN,
SENSOR_UPDATE_TOPIC,
)
_LOGGER = logging.getLogger(__name__)
TYPE_FLOW_SENSOR_CLICK_M3 = "flow_sensor_clicks_cubic_meter"
TYPE_FLOW_SENSOR_CONSUMED_LITERS = "flow_sensor_consumed_liters"
TYPE_FLOW_SENSOR_START_INDEX = "flow_sensor_start_index"
TYPE_FLOW_SENSOR_WATERING_CLICKS = "flow_sensor_watering_clicks"
TYPE_FREEZE_TEMP = "freeze_protect_temp"
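# Each SENSORS entry maps a sensor type to a tuple of
# (name, icon, unit, device_class, enabled_by_default, api_category),
# unpacked in that order by async_setup_entry() below.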
SENSORS = {
TYPE_FLOW_SENSOR_CLICK_M3: (
"Flow Sensor Clicks",
"mdi:water-pump",
"clicks/m^3",
None,
False,
DATA_PROVISION_SETTINGS,
),
TYPE_FLOW_SENSOR_CONSUMED_LITERS: (
"Flow Sensor Consumed Liters",
"mdi:water-pump",
"liter",
None,
False,
DATA_PROVISION_SETTINGS,
),
TYPE_FLOW_SENSOR_START_INDEX: (
"Flow Sensor Start Index",
"mdi:water-pump",
"index",
None,
False,
DATA_PROVISION_SETTINGS,
),
TYPE_FLOW_SENSOR_WATERING_CLICKS: (
"Flow Sensor Clicks",
"mdi:water-pump",
"clicks",
None,
False,
DATA_PROVISION_SETTINGS,
),
TYPE_FREEZE_TEMP: (
"Freeze Protect Temperature",
"mdi:thermometer",
TEMP_CELSIUS,
"temperature",
True,
DATA_RESTRICTIONS_UNIVERSAL,
),
}
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up RainMachine sensors based on a config entry."""
rainmachine = hass.data[RAINMACHINE_DOMAIN][DATA_CLIENT][entry.entry_id]
async_add_entities(
[
RainMachineSensor(
rainmachine,
sensor_type,
name,
icon,
unit,
device_class,
enabled_by_default,
api_category,
)
for (
sensor_type,
(name, icon, unit, device_class, enabled_by_default, api_category),
) in SENSORS.items()
]
)
class RainMachineSensor(RainMachineEntity):
"""A sensor implementation for raincloud device."""
def __init__(
self,
rainmachine,
sensor_type,
name,
icon,
unit,
device_class,
enabled_by_default,
api_category,
):
"""Initialize."""
super().__init__(rainmachine)
self._api_category = api_category
self._device_class = device_class
self._enabled_by_default = enabled_by_default
self._icon = icon
self._name = name
self._sensor_type = sensor_type
self._state = None
self._unit = unit
@property
def entity_registry_enabled_default(self):
"""Determine whether an entity is enabled by default."""
return self._enabled_by_default
@property
def icon(self) -> str:
"""Return the icon."""
return self._icon
@property
def state(self) -> str:
"""Return the name of the entity."""
return self._state
@property
def unique_id(self) -> str:
"""Return a unique, Home Assistant friendly identifier for this entity."""
return "{}_{}".format(
self.rainmachine.device_mac.replace(":", ""), self._sensor_type
)
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
async_dispatcher_connect(self.hass, SENSOR_UPDATE_TOPIC, self._update_state)
)
await self.rainmachine.async_register_sensor_api_interest(self._api_category)
self.update_from_latest_data()
async def async_will_remove_from_hass(self):
"""Disconnect dispatcher listeners and deregister API interest."""
super().async_will_remove_from_hass()
self.rainmachine.async_deregister_sensor_api_interest(self._api_category)
@callback
def update_from_latest_data(self):
"""Update the sensor's state."""
if self._sensor_type == TYPE_FLOW_SENSOR_CLICK_M3:
self._state = self.rainmachine.data[DATA_PROVISION_SETTINGS]["system"].get(
"flowSensorClicksPerCubicMeter"
)
elif self._sensor_type == TYPE_FLOW_SENSOR_CONSUMED_LITERS:
clicks = self.rainmachine.data[DATA_PROVISION_SETTINGS]["system"].get(
"flowSensorWateringClicks"
)
clicks_per_m3 = self.rainmachine.data[DATA_PROVISION_SETTINGS][
"system"
].get("flowSensorClicksPerCubicMeter")
if clicks and clicks_per_m3:
self._state = (clicks * 1000) / clicks_per_m3
else:
self._state = None
elif self._sensor_type == TYPE_FLOW_SENSOR_START_INDEX:
self._state = self.rainmachine.data[DATA_PROVISION_SETTINGS]["system"].get(
"flowSensorStartIndex"
)
elif self._sensor_type == TYPE_FLOW_SENSOR_WATERING_CLICKS:
self._state = self.rainmachine.data[DATA_PROVISION_SETTINGS]["system"].get(
"flowSensorWateringClicks"
)
elif self._sensor_type == TYPE_FREEZE_TEMP:
self._state = self.rainmachine.data[DATA_RESTRICTIONS_UNIVERSAL][
"freezeProtectTemp"
]
| apache-2.0 |
demorest/rtpipe | rtpipe/RT.py | 1 | 66374 | import rtpipe.parsems as pm
import rtpipe.parsecal as pc
import rtpipe.parsesdm as ps
import rtlib_cython as rtlib
import multiprocessing as mp
import multiprocessing.sharedctypes as mps
from contextlib import closing
import numpy as n
from scipy.special import erf
import scipy.stats.mstats as mstats
try:
import casautil
except ImportError:
import pwkit.environments.casa.util as casautil
import os, glob, logging
import cPickle as pickle
from functools import partial
import random
import math
# setup CASA and logging
qa = casautil.tools.quanta()
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.captureWarnings(True)
logger = logging.getLogger('rtpipe')
def pipeline(d, segments):
""" Transient search pipeline running on single node.
Processes one or more segments of data (in which a single bgsub, (u,v,w), etc. can be used).
Can search completely, independently, and saves candidates.
If segments is a list of segments, then it will parallelize read/search processes.
Stages:
0) Take dictionary that defines metadata and search params
-- This defines state of pipeline, including times, uv extent, pipeline search parameters, etc.
1) Read data
-- Overlapping reads needed to maintain sensitivity to all DMs at all times
2) Prepare data
-- Reads/applies telcal/CASA solutions, flags, bg time subtraction
3) Search using all threads
-- Option for plug-and-play detection algorithm and multiple filters
4) Save candidate and noise info, if requested
"""
if type(segments) == int:
segments = [segments]
logger.info('Starting search of %s, scan %d, segments %s' % (d['filename'], d['scan'], str(segments)))
# seed the pseudo-random number generator # TJWL
random.seed()
# set up shared arrays to fill
data_read_mem = mps.Array(mps.ctypes.c_float, datasize(d)*2); data_mem = mps.Array(mps.ctypes.c_float, datasize(d)*2)
u_read_mem = mps.Array(mps.ctypes.c_float, d['nbl']); u_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
v_read_mem = mps.Array(mps.ctypes.c_float, d['nbl']); v_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
w_read_mem = mps.Array(mps.ctypes.c_float, d['nbl']); w_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
# need these if debugging
data = numpyview(data_mem, 'complex64', datashape(d)) # optional
data_read = numpyview(data_read_mem, 'complex64', datashape(d)) # optional
u = numpyview(u_mem, 'float32', d['nbl'], raw=False)
v = numpyview(v_mem, 'float32', d['nbl'], raw=False)
w = numpyview(w_mem, 'float32', d['nbl'], raw=False)
results = {}
# only one needed for parallel read/process. more would overwrite memory space
with closing(mp.Pool(1, initializer=initread, initargs=(data_read_mem, u_read_mem, v_read_mem, w_read_mem, data_mem, u_mem, v_mem, w_mem))) as readpool:
try:
# submit all segments to pool of 1. locking data should keep this from running away.
for segment in segments:
assert segment in range(d['nsegments']), 'Segment %d not in range of %d nsegments' % (segment, d['nsegments'])
candsfile = getcandsfile(d, segment)
if d['savecands'] and os.path.exists(candsfile):
logger.error('candsfile %s already exists. Ending processing...' % candsfile)
else:
results[segment] = readpool.apply_async(pipeline_dataprep, (d, segment)) # no need for segment here? need to think through structure...
# step through pool of jobs and pull data off as ready. this allows pool to continue to next segment.
while results.keys():
for segment in results.keys():
if results[segment].ready():
job = results.pop(segment)
d = job.get()
else:
continue
with data_mem.get_lock():
if d['mock']: # could be list or int
# assume that std of vis in the middle of the segment is
# characteristic of noise throughout the segment
falsecands = {}
datamid = n.ma.masked_equal(data[d['readints']/2].real, 0, copy=True)
madstd = 1.4826 * n.ma.median(n.abs(datamid - n.ma.median(datamid)))/n.sqrt(d['npol']*d['nbl']*d['nchan'])
std = datamid.std()/n.sqrt(d['npol']*d['nbl']*d['nchan'])
logger.debug('Noise per vis in central int: madstd {}, std {}'.format(madstd, std))
dt = 1 # pulse width in integrations
if isinstance(d['mock'], int):
for i in n.random.randint(d['datadelay'][-1], d['readints'], d['mock']): # add nmock transients at random ints
(loff, moff, A, DM) = make_transient(madstd, max(d['dmarr']), Amin=1.2*d['sigma_image1'])
candid = (int(segment), int(i), DM, int(dt), int(0))
falsecands[candid] = [A/madstd, A, loff, moff]
elif isinstance(d['mock'], list):
for mock in d['mock']:
try:
(i, DM, loff, moff, SNR) = mock
candid = (int(segment), int(i), DM, int(dt), int(0))
falsecands[candid] = [SNR, SNR*madstd, loff, moff]
except:
logger.warn('Could not parse mock parameters: {}'.format(mock))
else:
logger.warn('Not a recognized type for mock.')
for candid in falsecands:
(segment, i, DM, dt, beamnum) = candid
(SNR, A, loff, moff) = falsecands[candid]
logger.info('Adding mock transient at int %d, DM %.1f, (l, m) = (%f, %f) at est SNR %.1f' % (i, DM, loff, moff, SNR))
add_transient(d, data, u, v, w, loff, moff, i, A, DM, dt)
if d['savecands']:
savecands(d, falsecands, domock=True)
cands = search(d, data_mem, u_mem, v_mem, w_mem)
# save candidate info
if d['savecands']:
logger.info('Saving %d candidates for segment %d...'
% (len(cands), segment))
savecands(d, cands)
except KeyboardInterrupt:
logger.error('Caught Ctrl-C. Closing processing pool.')
readpool.terminate()
readpool.join()
raise
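# Usage sketch (illustrative, not part of the original API): build a state
# dict with set_pipeline() below, then drive the segment-search function above
# (assumed here to be named pipeline). Filename and scan number are hypothetical.
#
#   d = set_pipeline('14A-425_sb29612394.sdm', 7)
#   pipeline(d, segments=range(d['nsegments']))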
def pipeline_dataprep(d, segment):
""" Single-threaded pipeline for data prep that can be started in a pool.
"""
logger.debug('dataprep starting for segment %d' % segment)
# dataprep reads for a single segment, so d['segment'] defined here
d['segment'] = segment
# set up numpy arrays, as expected by dataprep functions
data_read = numpyview(data_read_mem, 'complex64', datashape(d), raw=False); data = numpyview(data_mem, 'complex64', datashape(d), raw=False)
u_read = numpyview(u_read_mem, 'float32', d['nbl'], raw=False); u = numpyview(u_mem, 'float32', d['nbl'], raw=False)
v_read = numpyview(v_read_mem, 'float32', d['nbl'], raw=False); v = numpyview(v_mem, 'float32', d['nbl'], raw=False)
w_read = numpyview(w_read_mem, 'float32', d['nbl'], raw=False); w = numpyview(w_mem, 'float32', d['nbl'], raw=False)
#### #### #### ####
# 1) Read data
#### #### #### ####
with data_read_mem.get_lock():
if d['dataformat'] == 'ms': # CASA-based read
segread = pm.readsegment(d, segment)
data_read[:] = segread[0]
(u_read[:], v_read[:], w_read[:]) = (segread[1][d['readints']/2], segread[2][d['readints']/2], segread[3][d['readints']/2]) # mid int good enough for segment. could extend this to save per chunk
del segread
elif d['dataformat'] == 'sdm':
data_read[:] = ps.read_bdf_segment(d, segment)
(u_read[:], v_read[:], w_read[:]) = ps.get_uvw_segment(d, segment)
#### #### #### ####
# 2) Prepare data
#### #### #### ####
# calibrate data
if os.path.exists(d['gainfile']):
try:
radec = (); spwind = []; calname = '' # set defaults
if '.GN' in d['gainfile']: # if telcal file
if d.has_key('calname'):
calname = d['calname']
sols = pc.telcal_sol(d['gainfile']) # parse gainfile
else: # if CASA table
if d.has_key('calradec'):
radec = d['calradec'] # optionally defined cal location
spwind = d['spw']
sols = pc.casa_sol(d['gainfile'], flagants=d['flagantsol']) # parse gainfile
sols.parsebp(d['bpfile']) # parse bpfile
# if gainfile parsed ok, choose best solution for data
sols.set_selection(d['segmenttimes'][segment].mean(), d['freq']*1e9, rtlib.calc_blarr(d), calname=calname, pols=d['pols'], radec=radec, spwind=spwind)
sols.apply(data_read)
except Exception:
logger.warning('Could not parse or apply gainfile %s.' % d['gainfile'])
raise
else:
logger.warn('Calibration file not found. Proceeding with no calibration applied.')
# flag data
if len(d['flaglist']):
logger.info('Flagging with flaglist: %s' % d['flaglist'])
dataflag(d, data_read)
else:
logger.warn('No real-time flagging.')
# mean visibility subtraction in time
if d['timesub'] == 'mean':
logger.info('Subtracting mean visibility in time...')
rtlib.meantsub(data_read, [0, d['nbl']])
else:
logger.warn('No mean time subtraction.')
# save noise pickle
if d['savenoise']:
noisepickle(d, data_read, u_read, v_read, w_read, chunk=200)
# phase to new location if l1,m1 set and nonzero value
try:
if any([d['l1'], d['m1']]):
logger.info('Rephasing data to (l, m)=(%.4f, %.4f).' % (d['l1'], d['m1']))
rtlib.phaseshift_threaded(data_read, d, d['l1'], d['m1'], u_read, v_read)
d['l0'] = d['l1']
d['m0'] = d['m1']
else:
logger.debug('Not rephasing.')
except KeyError:
pass
with data_mem.get_lock():
data[:] = data_read[:]
u[:] = u_read[:]; v[:] = v_read[:]; w[:] = w_read[:]
logger.debug('All data unlocked for segment %d' % segment)
# d now has segment keyword defined
return d
def pipeline_reproduce(d, candloc=[], segment=None, lm=None, product='data'):
""" Reproduce data and/or candidates with given candloc or lm coordinate.
d and segment can be given, if only reading data.
candloc is length 5 or 6 with ([scan], segment, candint, dmind, dtind, beamnum).
product can be 'data', 'dataph', 'imdata'.
lm is tuple of (l,m) coordinates in radians.
"""
# set up shared arrays to fill
data_reproduce_mem = mps.Array(mps.ctypes.c_float, datasize(d)*2)
data_read_mem = mps.Array(mps.ctypes.c_float, datasize(d)*2)
data_mem = mps.Array(mps.ctypes.c_float, datasize(d)*2)
u_read_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
u_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
v_read_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
v_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
w_read_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
w_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
# get numpy views of memory spaces
data = numpyview(data_mem, 'complex64', datashape(d)) # optional
data_read = numpyview(data_read_mem, 'complex64', datashape(d)) # optional
u = numpyview(u_mem, 'float32', d['nbl'], raw=False)
v = numpyview(v_mem, 'float32', d['nbl'], raw=False)
w = numpyview(w_mem, 'float32', d['nbl'], raw=False)
# set up state dict for merge pkl
if len(candloc) == 6:
scan, segment, candint, dmind, dtind, beamnum = candloc
# this is now defined by call to rtpipe.set_pipeline in parsecands.plot_cand
# d['scan'] = scan
# d['starttime_mjd'] = d['starttime_mjddict'][scan]
# d['nsegments'] = len(d['segmenttimesdict'][scan])
# d['segmenttimes'] = d['segmenttimesdict'][scan]
elif len(candloc) == 5: # if not a merge pkl, then d['scan'] is correct
segment, candint, dmind, dtind, beamnum = candloc
elif isinstance(segment, int):
assert product == 'data', 'If only providing segment, then only data product can be produced.'
else:
logger.error('candloc must be length 5 or 6 or segment provided.')
return
with closing(mp.Pool(1, initializer=initread, initargs=(data_read_mem, u_read_mem, v_read_mem, w_read_mem, data_mem, u_mem, v_mem, w_mem))) as readpool:
readpool.apply(pipeline_dataprep, (d, segment))
if product == 'data':
logger.info('Returning prepared data...')
return data
elif product == 'dataph':
logger.info('Reproducing data...')
assert lm, 'lm must be tuple with (l, m) coords in radians.'
data = runreproduce(d, data_mem, data_reproduce_mem, u, v, w, dmind, dtind, lm=lm)
return data
elif product == 'imdata':
logger.info('Reproducing candidate...')
im, data = runreproduce(d, data_mem, data_reproduce_mem, u, v, w, dmind, dtind, candint=candint)
return im, data
else:
logger.error('product must be data, dataph, or imdata.')
def meantsubpool(d, data_read):
""" Wrapper for mean visibility subtraction in time.
Doesn't work when called from pipeline using multiprocessing pool.
"""
logger.info('Subtracting mean visibility in time...')
data_read = numpyview(data_read_mem, 'complex64', datashape(d))
tsubpart = partial(rtlib.meantsub, data_read)
blranges = [(d['nbl'] * t/d['nthread'], d['nbl']*(t+1)/d['nthread']) for t in range(d['nthread'])]
with closing(mp.Pool(1, initializer=initreadonly, initargs=(data_read_mem,))) as tsubpool:
tsubpool.map(tsubpart, blranges)
def dataflag(d, data_read):
""" Flagging data in single process
"""
for flag in d['flaglist']:
mode, sig, conv = flag
# resultlist = []
# with closing(mp.Pool(4, initializer=initreadonly, initargs=(data_read_mem,))) as flagpool:
for ss in d['spw']:
chans = n.arange(d['spw_chanr_select'][ss][0], d['spw_chanr_select'][ss][1])
for pol in range(d['npol']):
status = rtlib.dataflag(data_read, chans, pol, d, sig, mode, conv)
logger.info(status)
# hack to get rid of bad spw/pol combos whacked by rfi
if 'badspwpol' in d:
logger.info('Comparing overall power between spw/pol. Removing those with %d times typical value' % d['badspwpol'])
spwpol = {}
for spw in d['spw']:
chans = n.arange(d['spw_chanr_select'][spw][0], d['spw_chanr_select'][spw][1])
for pol in range(d['npol']):
spwpol[(spw, pol)] = n.abs(data_read[:,:,chans,pol]).std()
meanstd = n.mean(spwpol.values())
for (spw,pol) in spwpol:
if spwpol[(spw, pol)] > d['badspwpol']*meanstd:
logger.info('Flagging all of (spw %d, pol %d) for excess noise.' % (spw, pol))
chans = n.arange(d['spw_chanr_select'][spw][0], d['spw_chanr_select'][spw][1])
data_read[:,:,chans,pol] = 0j
def dataflagatom(chans, pol, d, sig, mode, conv):
""" Wrapper function to get shared memory as numpy array into pool
Assumes data_mem is global mps.Array
"""
data = numpyview(data_mem, 'complex64', datashape(d))
# data = n.ma.masked_array(data, data==0j) # this causes massive overflagging on 14sep03 data
return rtlib.dataflag(data, chans, pol, d, sig, mode, conv)
def search(d, data_mem, u_mem, v_mem, w_mem):
""" Search function.
Queues all trials with multiprocessing.
Assumes shared memory system with single uvw grid for all images.
"""
data = numpyview(data_mem, 'complex64', datashape(d))
u = numpyview(u_mem, 'float32', d['nbl'])
v = numpyview(v_mem, 'float32', d['nbl'])
w = numpyview(w_mem, 'float32', d['nbl'])
data_resamp_mem = mps.Array(mps.ctypes.c_float, datasize(d)*2)
data_resamp = numpyview(data_resamp_mem, 'complex64', datashape(d))
logger.debug('Search of segment %d' % d['segment'])
beamnum = 0 # not yet implemented
cands = {}
candsfile = getcandsfile(d)
if d['savecands'] and os.path.exists(candsfile):
logger.warn('candsfile %s already exists' % candsfile)
return cands
# make wterm kernels
if d['searchtype'] == 'image2w':
wres = 100
npix = max(d['npixx_full'], d['npixy_full'])
bls, uvkers = rtlib.genuvkernels(w, wres, npix, d['uvres'], thresh=0.05)
# SUBMITTING THE LOOPS
if n.any(data):
logger.debug('Searching in %d chunks with %d threads' % (d['nchunk'], d['nthread']))
logger.info('Dedispersing to max (DM, dt) of (%d, %d) ...' % (d['dmarr'][-1], d['dtarr'][-1]))
# open pool
with closing(mp.Pool(d['nthread'], initializer=initresamp, initargs=(data_mem, data_resamp_mem))) as resamppool:
blranges = [(d['nbl'] * t/d['nthread'], d['nbl']*(t+1)/d['nthread']) for t in range(d['nthread'])]
for dmind in xrange(len(d['dmarr'])):
dm = d['dmarr'][dmind]
logger.debug('Dedispersing for %d' % dm,)
dedisppart = partial(correct_dm, d, dm) # moves in fresh data
dedispresults = resamppool.map(dedisppart, blranges)
dtlast = 1
for dtind in xrange(len(d['dtarr'])):
dt = d['dtarr'][dtind]
if dt > 1:
# dedispersion in shared memory, mapped over baselines
# set partial functions for pool.map
logger.debug('Resampling for %d' % dt,)
resample = dt/dtlast
resamppart = partial(correct_dt, d, resample) # corrects in place
resampresults = resamppool.map(resamppart, blranges)
dtlast = dt
# set dm- and dt-dependent int ranges for segment
nskip_dm = ((d['datadelay'][-1] - d['datadelay'][dmind]) / dt) * (d['segment'] != 0) # nskip=0 for first segment
searchints = (d['readints'] - d['datadelay'][dmind]) / dt - nskip_dm
logger.debug('Imaging %d ints from %d for (%d,%d)' % (searchints, nskip_dm, dm, dt),)
# imaging in shared memory, mapped over ints
image1part = partial(image1, d, u, v, w, dmind, dtind, beamnum)
nchunkdt = min(searchints, max(d['nthread'], d['nchunk']/dt)) # parallelize in range bounded by (searchints, nthread)
irange = [(nskip_dm + searchints*chunk/nchunkdt, nskip_dm + searchints*(chunk+1)/nchunkdt) for chunk in range(nchunkdt)]
imageresults = resamppool.map(image1part, irange)
# COLLECTING THE RESULTS per dm/dt. Clears the way for overwriting data_resamp
for imageresult in imageresults:
for kk in imageresult.keys():
cands[kk] = imageresult[kk]
if 'sigma_plot' in d:
from rtpipe.reproduce import make_cand_plot as makecp
if 'snr2' in d['features']:
snrcol = d['features'].index('snr2')
elif 'snr1' in d['features']:
snrcol = d['features'].index('snr1')
maxsnr = max([0] + [value[snrcol] for value in cands.itervalues()]) # be sure max includes at least one value
if maxsnr > d['sigma_plot']:
segment, candint, dmind, dtind, beamnum = [key for key, value in cands.iteritems() if value[snrcol] == maxsnr][0]
logger.info('Making cand plot for scan %d, segment %d, candint %d, dmind %d, dtint %d with SNR %.1f.' % (d['scan'], segment, candint, dmind, dtind, maxsnr))
im, data = runreproduce(d, data_mem, data_resamp_mem, u, v, w, dmind, dtind, candint)
loclabel = [d['scan'], segment, candint, dmind, dtind, beamnum]
makecp(d, im, data, loclabel)
else:
logger.info('No candidate in segment %d above sigma_plot %.1f' % (d['segment'], d['sigma_plot']))
else:
logger.warn('Data for processing is zeros. Moving on...')
logger.info('Found %d cands in scan %d segment %d of %s. ' % (len(cands), d['scan'], d['segment'], d['filename']))
return cands
def runreproduce(d, data_mem, data_resamp_mem, u, v, w, dmind, dtind, candint=-1, lm=None, twindow=30):
""" Reproduce function, much like search.
Returns image and rephased data for given candint.
If no candint is given, it returns resampled data by default. Optionally rephases to lm=(l, m) coordinates.
"""
data_resamp = numpyview(data_resamp_mem, 'complex64', datashape(d))
with closing(mp.Pool(1, initializer=initresamp, initargs=(data_mem, data_resamp_mem))) as repropool:
# dedisperse
logger.info('Dedispersing with DM=%.1f, dt=%d...' % (d['dmarr'][dmind], d['dtarr'][dtind]))
repropool.apply(correct_dmdt, [d, dmind, dtind, (0,d['nbl'])])
# set up image
if 'image1' in d['searchtype']:
npixx = d['npixx']
npixy = d['npixy']
elif 'image2' in d['searchtype']:
npixx = d['npixx_full']
npixy = d['npixy_full']
if candint > -1:
if lm:
logger.warn('Using candint image to get l,m. Not using provided l,m.')
# image
logger.info('Imaging int %d with %d %d pixels...' % (candint, npixx, npixy))
im = repropool.apply(image1wrap, [d, u, v, w, npixx, npixy, candint/d['dtarr'][dtind]])
snrmin = im.min()/im.std()
snrmax = im.max()/im.std()
logger.info('Made image with SNR min, max: %.1f, %.1f' % (snrmin, snrmax))
if snrmax > -1*snrmin:
l1, m1 = calc_lm(d, im, minmax='max')
else:
l1, m1 = calc_lm(d, im, minmax='min')
# rephase and trim interesting ints out
repropool.apply(move_phasecenter, [d, l1, m1, u, v])
minint = max(candint/d['dtarr'][dtind]-twindow/2, 0)
maxint = min(candint/d['dtarr'][dtind]+twindow/2, len(data_resamp)/d['dtarr'][dtind])
return(im, data_resamp[minint:maxint].mean(axis=1))
else:
if lm:
l1, m1 = lm
repropool.apply(move_phasecenter, [d, l1, m1, u, v])
return data_resamp
def add_transient(d, data, u, v, w, l1, m1, i, s, dm=0, dt=1):
""" Add a transient to data.
l1, m1 are relative direction cosines (location) of transient
added at integration i (at highest freq) with brightness s (per int/chan/bl/pol in data units)
dm/dt are dispersion (in pc/cm3) and pulse width (in s).
"""
ang = lambda ch: l1 * u * d['freq'][ch]/d['freq_orig'][0] + m1 * v * d['freq'][ch]/d['freq_orig'][0]
delay = lambda ch: n.round(4.2e-3 * dm * (d['freq'][ch]**(-2) - d['freq'][-1]**(-2))/d['inttime'], 0).astype(int)
#snr_ideal = s/(data[i].real.std()/n.sqrt(d['npol']*d['nbl']*d['nchan']))
#logger.info('SNR of source with system brightness %.1f = %d (idealized; ok at low SNR)' % (s, int(snr_ideal)))
for ch in range(d['nchan']):
data[i+delay(ch):i+delay(ch)+dt, :, ch] += s * n.exp(2j*n.pi*ang(ch)[None,:,None])
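# Worked example of the delay formula above (illustrative numbers): for
# dm=100 pc/cm3, a channel at 1.0 GHz, a top channel at 1.4 GHz, and
# inttime=5 ms, the dispersive delay is
#   4.2e-3 * 100 * (1.0**-2 - 1.4**-2) / 0.005 ~= 41 integrations.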
def make_transient(std, DMmax, Amin=6., Amax=20., rmax=20., rmin=0., DMmin=0.):
""" Produce a mock transient pulse source for the purposes of characterizing the
detection success of the current pipeline.
Assumes
- Code to inject the transients does so by inserting at an array index
- Noise level at the center of the data array is characteristic of the
noise level throughout
Input
std - noise level in visibilities(?) at mid-point of segment
DMmax - maximum DM at which mock transient can be inserted [pc/cm^3]
Amin/Amax is amplitude in units of the std (calculated below)
rmax/rmin is radius range in arcmin
DMmin is min DM
Returns
loff - direction cosine offset of mock transient from phase center [radians]
moff - direction cosine offset of mock transient from phase center [radians]
A - amplitude of transient [std units]
DM - dispersion measure of mock transient [pc/cm^3]
"""
rad_arcmin = math.pi/(180*60)
phimin = 0.0
phimax = 2*math.pi
# Amplitude of transient, done in units of the std
# std is calculated assuming that noise level in the middle of the data,
# at index d['readints']/2, is characteristic of that throughout the data
A = random.uniform(Amin, Amax) * std
# Position of transient, in direction cosines
r = random.uniform(rmin, rmax)
phi = random.uniform(phimin, phimax)
loff = r*math.cos(phi) * rad_arcmin
moff = r*math.sin(phi) * rad_arcmin
# Dispersion measure
DM = random.uniform(DMmin, DMmax)
return loff, moff, A, DM
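def _example_make_transient():
    """ Illustrative sketch (not part of the original pipeline): draw one mock
    transient for an assumed noise level and DM range and log its parameters.
    """
    std = 0.01   # hypothetical per-visibility noise estimate
    (loff, moff, A, DM) = make_transient(std, DMmax=500.)
    logger.info('Mock transient at (l, m)=(%.2e, %.2e) with A=%.4f, DM=%.1f'
                % (loff, moff, A, DM))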
def pipeline_refine(d0, candloc, scaledm=2.1, scalepix=2, scaleuv=1.0, chans=[]):
"""
Reproduces candidate and potentially improves sensitivity through better DM and imaging parameters.
scale* parameters enhance sensitivity by refining the DM grid and image parameters.
Other options include:
d0['selectpol'] = ['RR']
d0['flaglist'] = [('blstd', 2.5, 0.05)]
"""
import rtpipe.parseparams as pp
assert len(candloc) == 6, 'candloc should be (scan, segment, candint, dmind, dtind, beamnum).'
scan, segment, candint, dmind, dtind, beamnum = candloc
d1 = d0.copy() # don't mess with original (mutable!)
# if file not at stated full path, assume it is local
if not os.path.exists(d1['filename']):
workdir = os.getcwd()
filename = os.path.join(workdir, os.path.basename(d1['filename']))
else:
filename = d1['filename']
# clean up d1 of superfluous keys
params = pp.Params() # will be used as input to rt.set_pipeline
for key in d1.keys():
if not hasattr(params, key):
_ = d1.pop(key)
d1['npix'] = 0; d1['uvres'] = 0
d1['savecands'] = False
d1['savenoise'] = False
# redefine d. many parameters modified after this to keep from messing up time boundaries/cand location
d = set_pipeline(filename, scan, **d1)
if chans:
d['chans'] = chans
# define memory space
# trim data?
data_mem = mps.Array(mps.ctypes.c_float, datasize(d)*2)
u_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
v_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
w_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
data = numpyview(data_mem, 'complex64', datashape(d))
u = numpyview(u_mem, 'float32', d['nbl'])
v = numpyview(v_mem, 'float32', d['nbl'])
w = numpyview(w_mem, 'float32', d['nbl'])
# fill data, uvw
data[:] = pipeline_reproduce(d, segment=segment, product='data')
d['segment'] = segment
u[:], v[:], w[:] = ps.get_uvw_segment(d, segment)
# refine parameters
dmcand = d['dmarr'][dmind]
if scaledm > 1.:
try:
dmdelta = d['dmarr'][dmind+1] - d['dmarr'][dmind]
except IndexError:
try:
dmdelta = d['dmarr'][dmind] - d['dmarr'][dmind-1]
except IndexError:
dmdelta = 0.1*dmcand
d['dmarr'] = list(n.arange(dmcand-dmdelta, dmcand+dmdelta, dmdelta/scaledm))
else:
d['dmarr'] = [dmcand]
d['dtarr'] = [d['dtarr'][dtind]]
d['npixx'] = scalepix*d['npixx']
d['npixy'] = scalepix*d['npixy']
d['uvres'] = scaleuv*d['uvres']
# search
logger.info('Refining DM grid to %s and expanding images to (%d, %d) pix with uvres %d' % (str(d['dmarr']), d['npixx'], d['npixy'], d['uvres']))
cands = search(d, data_mem, u_mem, v_mem, w_mem)
# making cand plot from this
# need to keep from confusing old and new indices
# im, data = rt.pipeline_reproduce(d, loc[candnum], product='imdata')
# scan, segment, candint, dmind, dtind, beamnum = loc
# loclabel = scan, segment, candint, dmind, dtind, beamnum
# make_cand_plot(d, im, data, loclabel, outname=outname)
# return info to reproduce/visualize refined cands
return d, cands
def pipeline_lightcurve(d, l1=0, m1=0, segments=[], scan=-1):
""" Makes lightcurve at given (l1, m1)
l1, m1 define phase center. if not set, then image max is used.
"""
if scan == -1: scan = d['scan']
if segments == []: segments = range(d['nsegments'])
d = set_pipeline(d['filename'], scan, fileroot=d['fileroot'], dmarr=[0], dtarr=[1], savenoise=False, timesub='', logfile=False, nsegments=d['nsegments'])
# define memory and numpy arrays
data_mem = mps.Array(mps.ctypes.c_float, datasize(d)*2)
data_read_mem = mps.Array(mps.ctypes.c_float, datasize(d)*2)
data_resamp_mem = mps.Array(mps.ctypes.c_float, datasize(d)*2)
u_read_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
u_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
v_read_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
v_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
w_read_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
w_mem = mps.Array(mps.ctypes.c_float, d['nbl'])
data_read = numpyview(data_read_mem, 'complex64', datashape(d)) # optional
u_read = numpyview(u_read_mem, 'float32', d['nbl'], raw=False)
v_read = numpyview(v_read_mem, 'float32', d['nbl'], raw=False)
w_read = numpyview(w_read_mem, 'float32', d['nbl'], raw=False)
lightcurve = n.zeros(shape=(d['nints'], d['nchan'], d['npol']), dtype='complex64')
phasecenters = []
with closing(mp.Pool(1, initializer=initread, initargs=(data_read_mem, u_read_mem, v_read_mem, w_read_mem, data_mem, u_mem, v_mem, w_mem))) as readpool:
for segment in segments:
logger.info('Reading data...')
readpool.apply(pipeline_dataprep, (d, segment))
# get image peak for rephasing
if not any([l1, m1]):
im = sample_image(d, data_read, u_read, v_read, w_read, i=-1, verbose=1, imager='xy')
l2, m2 = calc_lm(d, im)
else:
l2 = l1
m2 = m1
logger.info('Rephasing data to (l, m)=(%.4f, %.4f).' % (l2, m2))
rtlib.phaseshift_threaded(data_read, d, l2, m2, u_read, v_read)
phasecenters.append( (l2,m2) )
nskip = (24*3600*(d['segmenttimes'][segment,0] - d['starttime_mjd'])/d['inttime']).astype(int) # ensure that lc is set to what is read
lightcurve[nskip: nskip+d['readints']] = data_read.mean(axis=1)
return phasecenters, lightcurve
def set_pipeline(filename, scan, fileroot='', paramfile='', **kwargs):
""" Function defines pipeline state for search. Takes data/scan as input.
fileroot is base name for associated products (cal files, noise, cands). if blank, it is set to filename.
paramfile is name of file that defines all pipeline parameters (python-like syntax).
kwargs used to overload paramfile definitions.
Many parameters take 0 as default, which auto-defines ideal parameters.
This definition does not yet consider memory/cpu/time limitations.
nsegments defines how to break jobs in time. nchunk defines how many jobs are sent to nthreads.
"""
workdir = os.path.dirname(os.path.abspath(filename))
filename = filename.rstrip('/')
assert os.path.exists(filename), 'Data file %s not found' % filename
# then get all metadata
if os.path.exists(os.path.join(filename, 'Main.xml')):
d = ps.get_metadata(filename, scan, paramfile=paramfile, **kwargs) # can take file name or Params instance
d['dataformat'] = 'sdm'
else:
d = pm.get_metadata(filename, scan, paramfile=paramfile, **kwargs)
d['dataformat'] = 'ms'
# define rootname for in/out cal/products
if fileroot:
d['fileroot'] = fileroot
else:
d['fileroot'] = os.path.basename(os.path.abspath(filename))
# autodetect calibration products locally
if not d['gainfile'] or not os.path.exists(d['gainfile']):
# first try to get CASA gain file
gainfilelist = glob.glob(os.path.join(d['workdir'], d['fileroot'] + '.g?'))
bpfilelist = glob.glob(os.path.join(d['workdir'], d['fileroot'] + '.b?'))
# if not in workdir, look locally
if not gainfilelist or not bpfilelist:
gainfilelist = glob.glob(d['fileroot'] + '.g?')
bpfilelist = glob.glob(d['fileroot'] + '.b?')
if gainfilelist and bpfilelist:
gainfilelist.sort()
d['gainfile'] = gainfilelist[-1]
logger.info('Autodetected CASA gainfile %s' % d['gainfile'])
bpfilelist.sort()
d['bpfile'] = bpfilelist[-1]
logger.info('Autodetected CASA bpfile %s' % d['bpfile'])
# if that fails, look for telcal file
filelist = glob.glob(os.path.join(d['workdir'], filename + '.GN'))
if not filelist:
filelist = glob.glob(filename + '.GN')
if filelist:
d['gainfile'] = filelist[0]
logger.info('Autodetected telcal file %s' % d['gainfile'])
assert os.path.exists(d['gainfile']), 'Calibration file autodetection failed for gainfile {0}'.format(d['gainfile'])
# define features
d['featureind'] = ['segment', 'int', 'dmind', 'dtind', 'beamnum'] # feature index. should be stable.
if 'features' not in d:
if d['searchtype'] == 'image1':
d['features'] = ['snr1', 'immax1', 'l1', 'm1'] # features returned by image1
elif d['searchtype'] == 'image1snip':
d['features'] = ['snr1', 'immax1', 'l1', 'm1', 'im40', 'spec20']
elif d['searchtype'] == 'image1stats':
d['features'] = ['snr1', 'immax1', 'l1', 'm1', 'specstd', 'specskew', 'speckurtosis', 'imskew', 'imkurtosis'] # note: spec statistics are all or nothing.
elif 'image2' in d['searchtype']:
d['features'] = ['snr1', 'immax1', 'l1', 'm1', 'snr2', 'immax2', 'l2', 'm2'] # features returned by image1
# set imaging parameters to use
if d['uvres'] == 0:
d['uvres'] = d['uvres_full']
else:
urange = d['urange'][scan]*(d['freq'].max()/d['freq_orig'][0]) # uvw from get_uvw already in lambda at ch0
vrange = d['vrange'][scan]*(d['freq'].max()/d['freq_orig'][0])
powers = n.fromfunction(lambda i,j: 2**i*3**j, (14,10), dtype='int') # power array for 2**i * 3**j
rangex = n.round(d['uvoversample']*urange).astype('int')
rangey = n.round(d['uvoversample']*vrange).astype('int')
largerx = n.where(powers-rangex/d['uvres'] > 0, powers, powers[-1,-1])
p2x, p3x = n.where(largerx == largerx.min())
largery = n.where(powers-rangey/d['uvres'] > 0, powers, powers[-1,-1])
p2y, p3y = n.where(largery == largery.min())
d['npixx_full'] = (2**p2x * 3**p3x)[0]
d['npixy_full'] = (2**p2y * 3**p3y)[0]
# set number of pixels to image
d['npixx'] = d['npixx_full']
d['npixy'] = d['npixy_full']
if 'npix_max' in d:
if d['npix_max']:
d['npixx'] = min(d['npix_max'], d['npixx_full'])
d['npixy'] = min(d['npix_max'], d['npixy_full'])
if d['npix']:
d['npixx'] = d['npix']
d['npixy'] = d['npix']
else:
d['npix'] = max(d['npixx'], d['npixy']) # this used to define fringe time
# define dmarr, if not already
if len(d['dmarr']) == 0:
if d.has_key('dm_maxloss') and d.has_key('maxdm') and d.has_key('dm_pulsewidth'):
d['dmarr'] = calc_dmgrid(d, maxloss=d['dm_maxloss'], maxdm=d['maxdm'], dt=d['dm_pulsewidth'])
if d['maxdm'] > 0:
logger.info('Calculated %d dms for max sensitivity loss %.2f, maxdm %d pc/cm3, and pulse width %d ms' % (len(d['dmarr']), d['dm_maxloss'], d['maxdm'], d['dm_pulsewidth']/1000))
else:
d['dmarr'] = [0]
logger.info('Can\'t calculate dm grid without dm_maxloss, maxdm, and dm_pulsewidth defined. Setting to [0].')
# define times for data to read
d['t_overlap'] = rtlib.calc_delay(d['freq'], d['inttime'], max(d['dmarr'])).max()*d['inttime'] # time of overlap for total dm coverage at segment boundaries
d['datadelay'] = [rtlib.calc_delay(d['freq'], d['inttime'],dm).max() for dm in d['dmarr']]
d['nints'] = d['nints'] - d['nskip']
# pols
if d.has_key('selectpol'):
d['pols'] = [pol for pol in d['pols_orig'] if pol in d['selectpol']]
else:
d['pols'] = d['pols_orig']
d['npol'] = len(d['pols'])
# split imaging into chunks. ideally one per thread, but can modify to fit available memory
if d['nchunk'] == 0:
d['nchunk'] = d['nthread']
# if nsegments is 0, then auto-define within memory limit
if not d['nsegments']:
fringetime = calc_fringetime(d)
d['nsegments'] = max(1, min(d['nints'], int(d['scale_nsegments']*d['inttime']*d['nints']/(fringetime-d['t_overlap'])))) # at least 1, at most nints
calc_segment_times(d)
# if auto nsegment definition makes segment too large, try to scale it down to fit in memory_limit (if provided)
# limit defined for dm sweep time and max nchunk/nthread ratio
if d.has_key('memory_limit'):
(vismem0, immem0) = calc_memory_footprint(d, limit=True)
assert vismem0+immem0 < d['memory_limit'], 'memory_limit of {0} is smaller than best solution of {1}. Try forcing nsegments/nchunk larger than {2}/{3} or reducing maxdm/npix'.format(d['memory_limit'], vismem0+immem0, d['nsegments'], max(d['dtarr'])/min(d['dtarr']))
(vismem, immem) = calc_memory_footprint(d)
if vismem+immem > d['memory_limit']:
logger.info('Over memory limit of {4} when reading {0} segments with {1} chunks ({2}/{3} GB for visibilities/imaging). Searching for solution down to {5}/{6} GB...'.format(d['nsegments'], d['nchunk'], vismem, immem, d['memory_limit'], vismem0, immem0))
while vismem+immem > d['memory_limit']:
(vismem, immem) = calc_memory_footprint(d)
logger.debug('Using {0} segments with {1} chunks ({2}/{3} GB for visibilities/imaging). Searching for better solution...'.format(d['nsegments'], d['nchunk'], vismem, immem))
d['scale_nsegments'] = d['scale_nsegments'] * (vismem+immem)/float(d['memory_limit'])
d['nsegments'] = max(1, min(d['nints'], int(d['scale_nsegments']*d['inttime']*d['nints']/(fringetime-d['t_overlap'])))) # at least 1, at most nints
calc_segment_times(d)
(vismem, immem) = calc_memory_footprint(d)
while vismem+immem > d['memory_limit']:
logger.debug('Doubling nchunk from %d to fit in %d GB memory limit.' % (d['nchunk'], d['memory_limit']))
d['nchunk'] = 2*d['nchunk']
(vismem, immem) = calc_memory_footprint(d)
if d['nchunk'] == max(d['dtarr'])/min(d['dtarr'])*d['nthread']: # limit nchunk/nthread to at most the range in dt
d['nchunk'] = d['nthread']
break
(vismem, immem) = calc_memory_footprint(d)
# final set up of memory
calc_segment_times(d)
(vismem, immem) = calc_memory_footprint(d)
# scaling of number of integrations beyond dt=1
assert all(d['dtarr']) and (d['dtarr'] == sorted(d['dtarr'])), 'dtarr must be larger than 0 and in increasing order'
# calculate number of thermal noise candidates per segment
nfalse = calc_nfalse(d)
logger.info('')
logger.info('Pipeline summary:')
if '.GN' in d['gainfile']:
logger.info('\t Products saved with %s. telcal calibration with %s' % (d['fileroot'], os.path.basename(d['gainfile'])))
else:
logger.info('\t Products saved with %s. CASA calibration files (%s, %s)' % (d['fileroot'], os.path.basename(d['gainfile']), os.path.basename(d['bpfile'])))
logger.info('\t Using %d segment%s of %d ints (%.1f s) with overlap of %.1f s' % (d['nsegments'], "s"[not d['nsegments']-1:], d['readints'], d['t_segment'], d['t_overlap']))
if d['t_overlap'] > d['t_segment']/3.:
logger.info('\t\t Lots of segments needed, since Max DM sweep (%.1f s) close to segment size (%.2f s)' % (d['t_overlap'], d['t_segment']))
logger.info('\t Downsampling in time/freq by %d/%d and skipping %d ints from start of scan.' % (d['read_tdownsample'], d['read_fdownsample'], d['nskip']))
logger.info('\t Excluding ants %s' % (d['excludeants']))
logger.info('\t Using pols %s' % (d['pols']))
logger.info('')
logger.info('\t Search with %s and threshold %.1f.' % (d['searchtype'], d['sigma_image1']))
logger.info('\t Using %d DMs from %.1f to %.1f and dts %s.' % (len(d['dmarr']), min(d['dmarr']), max(d['dmarr']), d['dtarr']))
logger.info('\t Using uvgrid npix=(%d,%d) and res=%d.' % (d['npixx'], d['npixy'], d['uvres']))
logger.info('\t Expect %d thermal false positives per segment.' % nfalse)
logger.info('')
logger.info('\t Visibility memory usage is %.1f GB/segment' % vismem)
logger.info('\t Imaging in %d chunk%s using max of %.1f GB/segment' % (d['nchunk'], "s"[not d['nchunk']-1:], immem))
logger.info('\t Grand total memory usage: %.1f GB/segment' % (vismem + immem))
return d
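# Illustrative call (filename hypothetical): build a search state for scan 7,
# overriding the DM and dt grids defined in any paramfile.
#
#   d = set_pipeline('14A-425_sb29612394.sdm', 7, dmarr=[0., 50., 100.],
#                    dtarr=[1, 2, 4])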
def getcandsfile(d, segment=-1, domock=False):
""" Return name of candsfile for a given dictionary. Must have d['segment'] defined.
domock is option to save simulated cands.
"""
if domock:
prefix = 'candsmock_'
else:
prefix= 'cands_'
if d.has_key('segment'):
return os.path.join(d['workdir'], prefix + d['fileroot'] + '_sc' + str(d['scan']) + 'seg' + str(d['segment']) + '.pkl')
elif segment >= 0:
return os.path.join(d['workdir'], prefix + d['fileroot'] + '_sc' + str(d['scan']) + 'seg' + str(segment) + '.pkl')
else:
return ''
def getnoisefile(d, segment=-1):
""" Return name of noisefile for a given dictionary. Must have d['segment'] defined.
"""
if d.has_key('segment'):
return os.path.join(d['workdir'], 'noise_' + d['fileroot'] + '_sc' + str(d['scan']) + 'seg' + str(d['segment']) + '.pkl')
elif segment >= 0:
return os.path.join(d['workdir'], 'noise_' + d['fileroot'] + '_sc' + str(d['scan']) + 'seg' + str(segment) + '.pkl')
else:
return ''
def calc_nfalse(d):
""" Calculate the number of thermal-noise false positives per segment.
"""
dtfactor = n.sum([1./i for i in d['dtarr']]) # assumes dedisperse-all algorithm
ntrials = d['readints'] * dtfactor * len(d['dmarr']) * d['npixx'] * d['npixy']
qfrac = 1 - (erf(d['sigma_image1']/n.sqrt(2)) + 1)/2.
nfalse = int(qfrac*ntrials)
return nfalse
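def _example_nfalse():
    """ Worked example of the false-positive estimate above with assumed
    values: 512 ints, dts [1, 2], 100 DMs, 512x512 pixels at 6.5 sigma gives
    under one thermal false positive per segment.
    """
    ntrials = 512 * (1. + 1/2.) * 100 * 512 * 512   # readints * dtfactor * ndm * npix^2
    qfrac = 1 - (erf(6.5/n.sqrt(2)) + 1)/2.
    return int(qfrac*ntrials)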
def calc_segment_times(d):
""" Helper function for set_pipeline to define segmenttimes list, given nsegments definition
"""
# this casts to int (flooring) to avoid 0.5 int rounding issue.
stopdts = n.linspace(d['nskip']+d['t_overlap']/d['inttime'], d['nints'], d['nsegments']+1)[1:] # nseg+1 assures that at least one seg made
startdts = n.concatenate( ([d['nskip']], stopdts[:-1]-d['t_overlap']/d['inttime']) )
segmenttimes = []
for (startdt, stopdt) in zip(d['inttime']*startdts, d['inttime']*stopdts):
starttime = qa.getvalue(qa.convert(qa.time(qa.quantity(d['starttime_mjd']+startdt/(24*3600),'d'),form=['ymd'], prec=9)[0], 's'))[0]/(24*3600)
stoptime = qa.getvalue(qa.convert(qa.time(qa.quantity(d['starttime_mjd']+stopdt/(24*3600), 'd'), form=['ymd'], prec=9)[0], 's'))[0]/(24*3600)
segmenttimes.append((starttime, stoptime))
d['segmenttimes'] = n.array(segmenttimes)
totaltimeread = 24*3600*(d['segmenttimes'][:, 1] - d['segmenttimes'][:, 0]).sum() # not guaranteed to be the same for each segment
d['readints'] = n.round(totaltimeread / (d['inttime']*d['nsegments']*d['read_tdownsample'])).astype(int)
d['t_segment'] = totaltimeread/d['nsegments']
def calc_memory_footprint(d, headroom=4., visonly=False, limit=False):
""" Given pipeline state dict, this function calculates the memory required
to store visibilities and make images.
headroom scales visibility memory size from single data object to all copies (and potential file read needs)
limit=True returns the minimum memory configuration
Returns tuple of (vismem, immem) in units of GB.
"""
toGB = 8/1024.**3 # number of complex64s to GB
d0 = d.copy()
# limit defined for dm sweep time and max nchunk/nthread ratio
if limit:
d0['readints'] = d['t_overlap']/d['inttime']
d0['nchunk'] = max(d['dtarr'])/min(d['dtarr']) * d['nthread']
vismem = headroom * datasize(d0) * toGB
if visonly:
return vismem
else:
immem = d0['nthread'] * (d0['readints']/d0['nchunk'] * d0['npixx'] * d0['npixy']) * toGB
return (vismem, immem)
def calc_fringetime(d):
""" Estimate largest time span of a "segment".
A segment is the maximal time span that can have a single bg fringe subtracted and uv grid definition.
Max fringe window estimated for 5% amp loss at first null averaged over all baselines. Assumes dec=+90, which is conservative.
Returns time in seconds that defines good window.
"""
maxbl = d['uvres']*d['npix']/2 # fringe time for imaged data only
fringetime = 0.5*(24*3600)/(2*n.pi*maxbl/25.) # max fringe window in seconds
return fringetime
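def _example_fringetime():
    """ Worked example with assumed imaging parameters: uvres=60, npix=512
    gives maxbl = 60*512/2 = 15360 lambda and a fringe window of
    0.5*86400/(2*pi*15360/25) ~= 5.6 seconds.
    """
    maxbl = 60 * 512 / 2
    return 0.5*(24*3600)/(2*n.pi*maxbl/25.)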
def correct_dmdt(d, dmind, dtind, blrange):
""" Dedisperses and resamples data *in place*.
Drops edges, since it assumes that data is read with overlapping chunks in time.
"""
data = numpyview(data_mem, 'complex64', datashape(d))
data_resamp = numpyview(data_resamp_mem, 'complex64', datashape(d))
bl0,bl1 = blrange
data_resamp[:, bl0:bl1] = data[:, bl0:bl1]
rtlib.dedisperse_resample(data_resamp, d['freq'], d['inttime'], d['dmarr'][dmind], d['dtarr'][dtind], blrange, verbose=0) # dedisperses data.
def correct_dm(d, dm, blrange):
""" Dedisperses data into data_resamp
Drops edges, since it assumes that data is read with overlapping chunks in time.
"""
data = numpyview(data_mem, 'complex64', datashape(d))
data_resamp = numpyview(data_resamp_mem, 'complex64', datashape(d))
bl0,bl1 = blrange
data_resamp[:, bl0:bl1] = data[:, bl0:bl1]
rtlib.dedisperse_par(data_resamp, d['freq'], d['inttime'], dm, blrange, verbose=0) # dedisperses data.
def correct_dt(d, dt, blrange):
""" Resamples data_resamp
Drops edges, since it assumes that data is read with overlapping chunks in time.
"""
data = numpyview(data_mem, 'complex64', datashape(d))
data_resamp = numpyview(data_resamp_mem, 'complex64', datashape(d))
bl0,bl1 = blrange
rtlib.resample_par(data_resamp, d['freq'], d['inttime'], dt, blrange, verbose=0) # resamples data.
def calc_lm(d, im=[], pix=(), minmax='max'):
""" Helper function to calculate location of image pixel in (l,m) coords.
Assumes peak pixel, but input can be provided in pixel units.
minmax defines whether to look for image maximum or minimum.
"""
if len(pix) == 0: # default is to get pixel from image
if minmax == 'max':
peakl, peakm = n.where(im == im.max())
elif minmax == 'min':
peakl, peakm = n.where(im == im.min())
peakl = peakl[0]; peakm = peakm[0]
elif len(pix) == 2: # can also specify
peakl, peakm = pix
if len(im):
npixx, npixy = im.shape
else:
npixx = d['npixx']
npixy = d['npixy']
l1 = (npixx/2. - peakl)/(npixx*d['uvres'])
m1 = (npixy/2. - peakm)/(npixy*d['uvres'])
return l1, m1
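def _example_calc_lm(d):
    """ Sketch (illustrative): the image center pixel maps to (l, m) = (0, 0),
    and each pixel step corresponds to 1/(npix*uvres) radians.
    """
    return calc_lm(d, pix=(d['npixx']/2, d['npixy']/2))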
def move_phasecenter(d, l1, m1, u, v):
""" Handler function for phaseshift_threaded
"""
logger.info('Rephasing data to (l, m)=(%.4f, %.4f).' % (l1, m1))
data_resamp = numpyview(data_resamp_mem, 'complex64', datashape(d))
rtlib.phaseshift_threaded(data_resamp, d, l1, m1, u, v)
def calc_dmgrid(d, maxloss=0.05, dt=3000., mindm=0., maxdm=0.):
""" Function to calculate the DM values for a given maximum sensitivity loss.
maxloss is sensitivity loss tolerated by dm bin width. dt is assumed pulse width in microsec.
"""
# parameters
tsamp = d['inttime']*1e6 # in microsec
k = 8.3
freq = d['freq'].mean() # central (mean) frequency in GHz
bw = 1e3*(d['freq'][-1] - d['freq'][0])
ch = 1e3*(d['freq'][1] - d['freq'][0]) # channel width in MHz
# width functions and loss factor
dt0 = lambda dm: n.sqrt(dt**2 + tsamp**2 + ((k*dm*ch)/(freq**3))**2)
dt1 = lambda dm, ddm: n.sqrt(dt**2 + tsamp**2 + ((k*dm*ch)/(freq**3))**2 + ((k*ddm*bw)/(freq**3.))**2)
loss = lambda dm, ddm: 1 - n.sqrt(dt0(dm)/dt1(dm,ddm))
loss_cordes = lambda ddm, dfreq, dt, freq: 1 - (n.sqrt(n.pi) / (2 * 6.91e-3 * ddm * dfreq / (dt*freq**3))) * erf(6.91e-3 * ddm * dfreq / (dt*freq**3)) # not quite right for underresolved pulses
if maxdm == 0:
return [0]
else:
# iterate over fine dmgrid to find optimal dm values
dmgrid = n.arange(mindm, maxdm, 0.05)
dmgrid_final = [dmgrid[0]]
for i in range(len(dmgrid)):
ddm = (dmgrid[i] - dmgrid_final[-1])/2.
ll = loss(dmgrid[i],ddm)
if ll > maxloss:
dmgrid_final.append(dmgrid[i])
return dmgrid_final
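def _example_dmgrid(d):
    """ Illustrative call with assumed parameters: grid DMs out to
    500 pc/cm3 for a 5 ms pulse, tolerating 5% sensitivity loss per step.
    """
    return calc_dmgrid(d, maxloss=0.05, dt=5000., maxdm=500.)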
def image1(d, u, v, w, dmind, dtind, beamnum, irange):
""" Parallelizable function for imaging a chunk of data for a single dm.
Assumes data is dedispersed and resampled, so this just images each integration.
Simple one-stage imaging that returns dict of params.
returns dictionary with keys of cand location and values as tuple of features
"""
i0, i1 = irange
data_resamp = numpyview(data_resamp_mem, 'complex64', datashape(d))
# logger.info('i0 {0}, i1 {1}, dm {2}, dt {3}, len {4}'.format(i0, i1, dmind, dtind, len(data_resamp)))
ims,snr,candints = rtlib.imgallfullfilterxyflux(n.outer(u, d['freq']/d['freq_orig'][0]), n.outer(v, d['freq']/d['freq_orig'][0]), data_resamp[i0:i1], d['npixx'], d['npixy'], d['uvres'], d['sigma_image1'])
# logger.info('finished imaging candints {0}'.format(candints))
feat = {}
for i in xrange(len(candints)):
if snr[i] > 0:
l1, m1 = calc_lm(d, ims[i], minmax='max')
else:
l1, m1 = calc_lm(d, ims[i], minmax='min')
logger.info('Got one! Int=%d, DM=%d, dt=%d: SNR_im=%.1f @ (%.2e,%.2e).' % ((i0+candints[i])*d['dtarr'][dtind], d['dmarr'][dmind], d['dtarr'][dtind], snr[i], l1, m1))
candid = (d['segment'], (i0+candints[i])*d['dtarr'][dtind], dmind, dtind, beamnum)
# logger.info(candid)
# assemble feature in requested order
ff = []
for feature in d['features']:
if feature == 'snr1':
ff.append(snr[i])
elif feature == 'immax1':
if snr[i] > 0:
ff.append(ims[i].max())
else:
ff.append(ims[i].min())
elif feature == 'l1':
ff.append(l1)
elif feature == 'm1':
ff.append(m1)
elif feature == 'im40': # 40 pixel image peak cutout
peakx, peaky = n.where(ims[i] == ims[i].max())
sizex, sizey = ims[i].shape
# set image window with min=0 and max=size
xmin = max(0, peakx - 20); xmax = min(peakx + 20, sizex)
ymin = max(0, peaky - 20); ymax = min(peaky + 20, sizey)
ff.append(ims[i][xmin:xmax,ymin:ymax])
elif feature == 'spec20': # 20 int spectrum cutout
# set int window with min 0 and max len()
imin = max(0, (i0+candints[i])*d['dtarr'][dtind] - 10)
imax = min( (i0+candints[i])*d['dtarr'][dtind] + 10, len(data_resamp))
data_cut = data_resamp[imin:imax].copy()
rtlib.phaseshift_threaded(data_cut, d, l1, m1, u, v)
ff.append(data_cut.mean(axis=1))
elif feature in ['specstd', 'specskew', 'speckurtosis']: # this is standard set and must all appear together
if feature == 'specstd': # first this one, then others will use same data
seli = (i0+candints[i])*d['dtarr'][dtind]
datasel = data_resamp[seli:seli+1].copy()
rtlib.phaseshift_threaded(datasel, d, l1, m1, u, v)
data = n.ma.masked_equal(datasel, 0j)
spec = data.mean(axis=3).mean(axis=1).mean(axis=0).real
std = spec.std(axis=0)
ff.append(std)
elif feature == 'specskew':
skew = float(mstats.skew(spec))
ff.append(skew)
elif feature == 'speckurtosis':
kurtosis = float(mstats.kurtosis(spec))
ff.append(kurtosis)
elif feature == 'imskew':
skew = float(mstats.skew(ims[i].flatten()))
ff.append(skew)
elif feature == 'imkurtosis':
kurtosis = float(mstats.kurtosis(ims[i].flatten()))
ff.append(kurtosis)
feat[candid] = list(ff)
return feat
def image2(d, i0, i1, u, v, w, dmind, dtind, beamnum):
""" Parallelizable function for imaging a chunk of data for a single dm.
Assumes data is dedispersed and resampled, so this just images each integration.
Two-stage imaging uses ideal uv coverage in second image.
returns dictionary with keys of cand location and values as tuple of features
"""
data_resamp = numpyview(data_resamp_mem, 'complex64', datashape(d))
ims,snr,candints = rtlib.imgallfullfilterxy(n.outer(u, d['freq']/d['freq_orig'][0]), n.outer(v, d['freq']/d['freq_orig'][0]), data_resamp[i0:i1], d['npixx'], d['npixy'], d['uvres'], d['sigma_image1'])
feat = {}
for i in xrange(len(candints)):
# reimage
im2 = rtlib.imgonefullxy(n.outer(u, d['freq']/d['freq_orig'][0]), n.outer(v, d['freq']/d['freq_orig'][0]), data_resamp[i0+candints[i]], d['npixx_full'], d['npixy_full'], d['uvres'], verbose=0)
# find most extreme pixel
snrmax = im2.max()/im2.std()
snrmin = im2.min()/im2.std()
if snrmax >= abs(snrmin):
snr2 = snrmax
else:
snr2 = snrmin
# threshold
if abs(snr2) > d['sigma_image2']:
# calc loc in first image
if snr[i] > 0:
l1, m1 = calc_lm(d, ims[i], minmax='max')
else:
l1, m1 = calc_lm(d, ims[i], minmax='min')
# calc src loc in second image
if snr2 > 0:
l2, m2 = calc_lm(d, im2, minmax='max')
else:
l2, m2 = calc_lm(d, im2, minmax='min')
logger.info('Got one! Int=%d, DM=%d, dt=%d: SNR_im1=%.1f, SNR_im2=%.1f @ (%.2e,%.2e).' % ((i0+candints[i])*d['dtarr'][dtind], d['dmarr'][dmind], d['dtarr'][dtind], snr[i], snr2, l2, m2))
candid = (d['segment'], (i0+candints[i])*d['dtarr'][dtind], dmind, dtind, beamnum)
# assemble feature in requested order
ff = []
for feature in d['features']:
if feature == 'snr1':
ff.append(snr[i])
elif feature == 'immax1':
if snr[i] > 0:
ff.append(ims[i].max())
else:
ff.append(ims[i].min())
elif feature == 'l1':
ff.append(l1)
elif feature == 'm1':
ff.append(m1)
elif feature == 'snr2':
ff.append(snr2)
elif feature == 'immax2':
if snr2 > 0:
ff.append(im2.max())
else:
ff.append(im2.min())
elif feature == 'l2':
ff.append(l2)
elif feature == 'm2':
ff.append(m2)
feat[candid] = list(ff)
else:
logger.info('Almost... Int=%d, DM=%d, dt=%d: SNR_im1=%.1f, SNR_im2=%.1f.' % ((i0+candints[i])*d['dtarr'][dtind], d['dmarr'][dmind], d['dtarr'][dtind], snr[i], snr2))
return feat
def image2w(d, i0, i1, u, v, w, dmind, dtind, beamnum, bls, uvkers):
""" Parallelizable function for imaging a chunk of data for a single dm.
Assumes data is dedispersed and resampled, so this just images each integration.
Two-stage imaging uses ideal uv coverage in second image.
returns dictionary with keys of cand location and values as tuple of features
"""
data_resamp = numpyview(data_resamp_mem, 'complex64', datashape(d))
ims,snr,candints = rtlib.imgallfullfilterxy(n.outer(u, d['freq']/d['freq_orig'][0]), n.outer(v, d['freq']/d['freq_orig'][0]), data_resamp[i0:i1], d['npixx'], d['npixy'], d['uvres'], d['sigma_image1'])
feat = {}
for i in xrange(len(candints)):
# reimage
npix = max(d['npixx_full'], d['npixy_full'])
im2 = rtlib.imgonefullw(n.outer(u, d['freq']/d['freq_orig'][0]), n.outer(v, d['freq']/d['freq_orig'][0]), data_resamp[i0+candints[i]], npix, d['uvres'], bls, uvkers, verbose=1)
# find most extreme pixel
snrmax = im2.max()/im2.std()
snrmin = im2.min()/im2.std()
if snrmax >= abs(snrmin):
snr2 = snrmax
else:
snr2 = snrmin
# threshold
if abs(snr2) > d['sigma_image2']:
# calc loc in first image
if snr[i] > 0:
l1, m1 = calc_lm(d, ims[i], minmax='max')
else:
l1, m1 = calc_lm(d, ims[i], minmax='min')
# calc src loc in second image
if snr2 > 0:
l2, m2 = calc_lm(d, im2, minmax='max')
else:
l2, m2 = calc_lm(d, im2, minmax='min')
logger.info('Got one! Int=%d, DM=%d, dt=%d: SNR_im1=%.1f, SNR_im2=%.1f @ (%.2e,%.2e).' % ((i0+candints[i])*d['dtarr'][dtind], d['dmarr'][dmind], d['dtarr'][dtind], snr[i], snr2, l2, m2))
candid = (d['segment'], (i0+candints[i])*d['dtarr'][dtind], dmind, dtind, beamnum)
# assemble feature in requested order
ff = []
for feature in d['features']:
if feature == 'snr1':
ff.append(snr[i])
elif feature == 'immax1':
if snr[i] > 0:
ff.append(ims[i].max())
else:
ff.append(ims[i].min())
elif feature == 'l1':
ff.append(l1)
elif feature == 'm1':
ff.append(m1)
elif feature == 'snr2':
ff.append(snr2)
elif feature == 'immax2':
if snr2 > 0:
ff.append(im2.max())
else:
ff.append(im2.min())
elif feature == 'l2':
ff.append(l2)
elif feature == 'm2':
ff.append(m2)
feat[candid] = list(ff)
else:
logger.info('Almost... Int=%d, DM=%d, dt=%d: SNR_im1=%.1f, SNR_im2=%.1f.' % ((i0+candints[i])*d['dtarr'][dtind], d['dmarr'][dmind], d['dtarr'][dtind], snr[i], snr2))
return feat
def image1wrap(d, u, v, w, npixx, npixy, candint):
""" Parallelizable function for imaging a chunk of data for a single dm.
Assumes data is dedispersed and resampled, so this just images each integration.
Simple one-stage imaging that returns dict of params.
returns dictionary with keys of cand location and values as tuple of features
"""
data_resamp = numpyview(data_resamp_mem, 'complex64', datashape(d))
image = rtlib.imgonefullxy(n.outer(u, d['freq']/d['freq_orig'][0]), n.outer(v, d['freq']/d['freq_orig'][0]), data_resamp[candint], npixx, npixy, d['uvres'], verbose=1)
return image
def sample_image(d, data, u, v, w, i=-1, verbose=1, imager='xy', wres=100):
""" Samples one integration and returns image
i is integration to image. Default is mid int.
"""
if i == -1:
i = len(data)/2
if imager == 'xy':
image = rtlib.imgonefullxy(n.outer(u, d['freq']/d['freq_orig'][0]), n.outer(v, d['freq']/d['freq_orig'][0]), data[i], d['npixx'], d['npixy'], d['uvres'], verbose=verbose)
elif imager == 'w':
npix = max(d['npixx'], d['npixy'])
bls, uvkers = rtlib.genuvkernels(w, wres, npix, d['uvres'], ksize=21, oversample=1)
image = rtlib.imgonefullw(n.outer(u, d['freq']/d['freq_orig'][0]), n.outer(v, d['freq']/d['freq_orig'][0]), data[i], npix, d['uvres'], bls, uvkers, verbose=verbose)
# bls, lmkers = rtlib.genlmkernels(w, wres, npix, d['uvres'])
# image = rtlib.imgonefullw(n.outer(u, d['freq']/d['freq_orig'][0]), n.outer(v, d['freq']/d['freq_orig'][0]), data[i], npix, d['uvres'], [bls[0]], [lmkers[0]], verbose=verbose)
return image
def estimate_noiseperbl(data):
""" Takes large data array and sigma clips it to find noise per bl for input to detect_bispectra.
Takes mean across pols and channels for now, as in detect_bispectra.
"""
# define noise per baseline for data seen by detect_bispectra or image
datamean = data.mean(axis=2).imag # use imaginary part to estimate noise without calibrated, on-axis signal
(datameanmin, datameanmax) = rtlib.sigma_clip(datamean.flatten())
good = n.where( (datamean>datameanmin) & (datamean<datameanmax) )
noiseperbl = datamean[good].std() # measure single noise for input to detect_bispectra
logger.debug('Clipped to %d%% of data (%.3f to %.3f). Noise = %.3f.' % (100.*len(good[0])/len(datamean.flatten()), datameanmin, datameanmax, noiseperbl))
return noiseperbl
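def _example_noiseperbl():
    """ Sketch with synthetic data (illustrative): for pure Gaussian noise in
    a (nints, nbl, nchan, npol) array, the returned value is roughly the
    per-visibility sigma reduced by sqrt(nchan) from the channel average.
    """
    shape = (100, 10, 16, 2)
    fake = (n.random.randn(*shape) + 1j*n.random.randn(*shape)).astype('complex64')
    return estimate_noiseperbl(fake)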
def noisepickle(d, data, u, v, w, chunk=200):
""" Calculates noise properties and saves values to pickle.
chunk defines window for measurement. at least one measurement always made.
"""
if d['savenoise']:
noisefile = getnoisefile(d)
if os.path.exists(noisefile):
logger.warn('noisefile %s already exists' % noisefile)
else:
nints = len(data)
chunk = min(chunk, nints) # ensure at least one measurement
results = []
rr = range(0, nints, chunk)
if len(rr) == 1: rr.append(nints) # ensure the single chunk covers all ints (incl. nints=1 case)
for i in range(len(rr)-1):
imid = (rr[i]+rr[i+1])/2
noiseperbl = estimate_noiseperbl(data[rr[i]:rr[i+1]])
imstd = sample_image(d, data, u, v, w, imid, verbose=0).std()
zerofrac = float(len(n.where(data[rr[i]:rr[i+1]] == 0j)[0]))/data[rr[i]:rr[i+1]].size
results.append( (d['segment'], noiseperbl, zerofrac, imstd) )
with open(noisefile, 'a') as pkl:
pickle.dump(results, pkl)
logger.info('Wrote %d noise measurement%s to %s.' % (len(results), 's'[:len(results)-1], noisefile))
def savecands(d, cands, domock=False):
""" Save all candidates in pkl file for later aggregation and filtering.
domock is option to save simulated cands file
"""
with open(getcandsfile(d, domock=domock), 'w') as pkl:
pickle.dump(d, pkl)
pickle.dump(cands, pkl)
def datashape(d):
return (d['readints'], d['nbl'], d['nchan'], d['npol'])
def datasize(d):
return long(d['readints']*d['nbl']*d['nchan']*d['npol'])
def numpyview(arr, datatype, shape, raw=False):
""" Takes mp shared array and returns numpy array with given shape.
"""
if raw:
return n.frombuffer(arr, dtype=n.dtype(datatype)).view(n.dtype(datatype)).reshape(shape) # for shared mps.RawArray
else:
return n.frombuffer(arr.get_obj(), dtype=n.dtype(datatype)).view(n.dtype(datatype)).reshape(shape) # for shared mp.Array
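def _example_shared_view():
    """ Sketch of the shared-memory pattern used throughout this module
    (illustrative sizes): allocate a locked mp.Array of floats holding
    real+imag pairs, then view it as a complex64 numpy array.
    """
    shape = (4, 3, 2, 1)   # toy (ints, bl, chan, pol)
    arr = mps.Array(mps.ctypes.c_float, int(n.prod(shape))*2)   # *2 for real+imag
    return numpyview(arr, 'complex64', shape)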
def initreadonly(shared_arr_):
global data_read_mem
data_read_mem = shared_arr_ # must be inherited, not passed as an argument
def initresamp(shared_arr_, shared_arr2_):
global data_mem, data_resamp_mem
data_mem = shared_arr_
data_resamp_mem = shared_arr2_
def initread(shared_arr1_, shared_arr2_, shared_arr3_, shared_arr4_, shared_arr5_, shared_arr6_, shared_arr7_, shared_arr8_):
global data_read_mem, u_read_mem, v_read_mem, w_read_mem, data_mem, u_mem, v_mem, w_mem
data_read_mem = shared_arr1_ # must be inherited, not passed as an argument
u_read_mem = shared_arr2_
v_read_mem = shared_arr3_
w_read_mem = shared_arr4_
data_mem = shared_arr5_
u_mem = shared_arr6_
v_mem = shared_arr7_
w_mem = shared_arr8_
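# Usage sketch for the initializer pattern above (illustrative): workers
# inherit the shared arrays as module globals instead of receiving them as
# pickled arguments, e.g.
#
#   pool = mp.Pool(1, initializer=initread,
#                  initargs=(data_read_mem, u_read_mem, v_read_mem, w_read_mem,
#                            data_mem, u_mem, v_mem, w_mem))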
| bsd-3-clause |
TNT-Samuel/Coding-Projects | DNS Server/Source - Copy/Lib/site-packages/setuptools/command/install_lib.py | 431 | 3840 | import os
import imp
from itertools import product, starmap
import distutils.command.install_lib as orig
class install_lib(orig.install_lib):
"""Don't add compiled flags to filenames of non-Python files"""
def run(self):
self.build()
outfiles = self.install()
if outfiles is not None:
# always compile, in case we have any extension stubs to deal with
self.byte_compile(outfiles)
def get_exclusions(self):
"""
Return a collections.Sized collections.Container of paths to be
excluded for single_version_externally_managed installations.
"""
all_packages = (
pkg
for ns_pkg in self._get_SVEM_NSPs()
for pkg in self._all_packages(ns_pkg)
)
excl_specs = product(all_packages, self._gen_exclusion_paths())
return set(starmap(self._exclude_pkg_path, excl_specs))
def _exclude_pkg_path(self, pkg, exclusion_path):
"""
Given a package name and exclusion path within that package,
compute the full exclusion path.
"""
parts = pkg.split('.') + [exclusion_path]
return os.path.join(self.install_dir, *parts)
@staticmethod
def _all_packages(pkg_name):
"""
>>> list(install_lib._all_packages('foo.bar.baz'))
['foo.bar.baz', 'foo.bar', 'foo']
"""
while pkg_name:
yield pkg_name
pkg_name, sep, child = pkg_name.rpartition('.')
def _get_SVEM_NSPs(self):
"""
Get namespace packages (list) but only for
single_version_externally_managed installations and empty otherwise.
"""
# TODO: is it necessary to short-circuit here? i.e. what's the cost
# if get_finalized_command is called even when namespace_packages is
# False?
if not self.distribution.namespace_packages:
return []
install_cmd = self.get_finalized_command('install')
svem = install_cmd.single_version_externally_managed
return self.distribution.namespace_packages if svem else []
@staticmethod
def _gen_exclusion_paths():
"""
Generate file paths to be excluded for namespace packages (bytecode
cache files).
"""
# always exclude the package module itself
yield '__init__.py'
yield '__init__.pyc'
yield '__init__.pyo'
if not hasattr(imp, 'get_tag'):
return
base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
yield base + '.pyc'
yield base + '.pyo'
yield base + '.opt-1.pyc'
yield base + '.opt-2.pyc'
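    # Illustrative note (not part of setuptools): on a CPython where
    # imp.get_tag() == 'cpython-34', the generator above yields
    # '__init__.py', '__init__.pyc', '__init__.pyo',
    # '__pycache__/__init__.cpython-34.pyc' and the .pyo/.opt-* variants.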
def copy_tree(
self, infile, outfile,
preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
):
assert preserve_mode and preserve_times and not preserve_symlinks
exclude = self.get_exclusions()
if not exclude:
return orig.install_lib.copy_tree(self, infile, outfile)
# Exclude namespace package __init__.py* files from the output
from setuptools.archive_util import unpack_directory
from distutils import log
outfiles = []
def pf(src, dst):
if dst in exclude:
log.warn("Skipping installation of %s (namespace package)",
dst)
return False
log.info("copying %s -> %s", src, os.path.dirname(dst))
outfiles.append(dst)
return dst
unpack_directory(infile, outfile, pf)
return outfiles
def get_outputs(self):
outputs = orig.install_lib.get_outputs(self)
exclude = self.get_exclusions()
if exclude:
return [f for f in outputs if f not in exclude]
return outputs
| gpl-3.0 |
mozilla/firefox-flicks | flicks/videos/tests/test_util.py | 1 | 2199 | from django.core import mail
from django.test.utils import override_settings
from mock import patch
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
from flicks.base.tests import TestCase
from flicks.users.tests import UserFactory, UserProfileFactory
from flicks.videos.tests import VideoFactory
from flicks.videos.util import (send_approval_email, send_rejection_email,
vimeo_embed_code)
class TestVimeoEmbedCode(TestCase):
def test_basic(self):
result = vimeo_embed_code('id', width=5, height=2, elem_class='cls')
iframe = pq(result)
eq_(iframe[0].tag, 'iframe')
ok_(iframe.hasClass('cls'))
ok_(iframe.attr.src.startswith('https://player.vimeo.com/video/id'))
eq_(iframe.attr.width, '5')
eq_(iframe.attr.height, '2')
class SendApprovalEmailTests(TestCase):
def test_basic(self):
user = UserProfileFactory.create(user__email='boo@example.com').user
video = VideoFactory.create(user=user)
send_approval_email(video)
eq_(len(mail.outbox), 1)
eq_(mail.outbox[0].to, ['boo@example.com'])
@patch('flicks.videos.util.use_lang')
@override_settings(LANGUAGE_CODE='fr')
def test_no_profile(self, use_lang):
"""
If the user has no profile, use the installation's default language
code for the email locale.
"""
user = UserFactory.create(email='bar@example.com')
video = VideoFactory.create(user=user)
send_approval_email(video)
eq_(len(mail.outbox), 1)
eq_(mail.outbox[0].to, ['bar@example.com'])
use_lang.assert_called_with('fr')
class SendRejectionEmailTests(TestCase):
def test_invalid_user_id(self):
"""If the given user_id is invalid, do not send mail."""
send_rejection_email(999999999999)
eq_(len(mail.outbox), 0)
def test_valid_user_id(self):
"""If a valid user_id is given, send a rejection email."""
user = UserProfileFactory.create(user__email='blah@example.com').user
send_rejection_email(user.id)
eq_(len(mail.outbox), 1)
eq_(mail.outbox[0].to, ['blah@example.com'])
| bsd-3-clause |
waigani/GoAutoDeclare | goAutoDeclare.py | 1 | 3351 | # Copyright (c) 2014 Jesse Meek <https://github.com/waigani>
# This program is Free Software see LICENSE file for details.
""" GoAutoDeclare is a Sublime Text plugin which automatically corrects short
variable declaration ':=' and assignment operator '=' mistakes on save.
"""
import sublime, sublime_plugin, subprocess, re
errs = ["cannot assign to","no new variables on left side of"]
class GoAutoDeclareCommand(sublime_plugin.TextCommand):
""" Command gets called on save.
"""
def run(self, edit, output):
for s in output.split("\n"):
self.auto_correct(edit, 0, s)
self.auto_correct(edit, 1, s)
def auto_correct(self, edit, err, output):
""" Automatically correct assignment errors i.e. ':=' vs '='.
"""
v = self.view
regex = r"\:([^\:]*)\:.*(%s)" % errs[err]
g = re.search(regex, output)
if g is not None:
p = v.text_point(int(g.group(1))-1, 0)
line = v.line(p)
lineContents = v.substr(line)
changed = False
if err == 0:
if lineContents.find(":=") == -1 and lineContents.find("=") >=0:
lineContents = lineContents.replace("=",":=", 1)
changed = True
else:
if lineContents.find(":="):
lineContents = lineContents.replace(":=", "=", 1)
changed = True
            if changed:
                v.replace(edit, line, lineContents)
# TODO: save file
# if changed:
# v.window().run_command("save")
class GoAutoDeclareEventListener(sublime_plugin.EventListener):
""" GoAutoDeclare formatter event listener class.
"""
def on_post_save_async(self, view):
""" Called just before the file is going to be saved.
"""
fn = view.file_name()
output = "No errs found"
if view.file_name()[-3:] == ".go":
if view.file_name()[-8:] == "_test.go":
output = self.run_cmd("test", fn)
else:
output = self.run_cmd("build", fn)
# Run a new command to use the edit object for this view.
view.run_command('go_auto_declare', {
'output': output[1]})
def run_cmd(self, cmd, fn):
""" Runs cmd with fn as first arg and returns errors.
"""
env = get_setting("env")
# Build cmd.
cmd = "export GOPATH=\"%(go_path)s\"; export GOROOT=\"%(go_root)s\"; export PATH=%(path)s; $GOROOT/bin/go %(cmd)s %(fn)s;" % {
"go_path": env["GOPATH"],
"go_root": env["GOROOT"],
"path": env["PATH"],
"cmd": cmd,
"fn": fn}
# Run the cmd.
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, shell=True)
result, err = p.communicate()
return result.decode('utf-8'), err.decode('utf-8')
def get_setting(key, default=None):
""" Returns the user setting if found, otherwise it returns the
default setting. If neither are set the 'default' value passed in is returned.
"""
val = sublime.load_settings("User.sublime-settings").get(key)
if not val:
val = sublime.load_settings("Default.sublime-settings").get(key)
if not val:
val = default
    return val
| mit |
acaranta/fig | fig/cli/errors.py | 1 | 1785 | from __future__ import absolute_import
from textwrap import dedent
class UserError(Exception):
def __init__(self, msg):
self.msg = dedent(msg).strip()
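        # Messages below are written as indented triple-quoted strings;
        # dedent() strips the common leading whitespace so they print flush.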
def __unicode__(self):
return self.msg
__str__ = __unicode__
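    # Python 2 idiom: the message lives in __unicode__ and __str__ is
    # aliased to it so str(exc) and unicode(exc) agree.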
class DockerNotFoundMac(UserError):
def __init__(self):
super(DockerNotFoundMac, self).__init__("""
Couldn't connect to Docker daemon. You might need to install docker-osx:
https://github.com/noplay/docker-osx
""")
class DockerNotFoundUbuntu(UserError):
def __init__(self):
super(DockerNotFoundUbuntu, self).__init__("""
Couldn't connect to Docker daemon. You might need to install Docker:
http://docs.docker.io/en/latest/installation/ubuntulinux/
""")
class DockerNotFoundGeneric(UserError):
def __init__(self):
super(DockerNotFoundGeneric, self).__init__("""
Couldn't connect to Docker daemon. You might need to install Docker:
http://docs.docker.io/en/latest/installation/
""")
class ConnectionErrorDockerOSX(UserError):
def __init__(self):
super(ConnectionErrorDockerOSX, self).__init__("""
Couldn't connect to Docker daemon - you might need to run `docker-osx shell`.
""")
class ConnectionErrorGeneric(UserError):
def __init__(self, url):
super(ConnectionErrorGeneric, self).__init__("""
Couldn't connect to Docker daemon at %s - is it running?
If it's at a non-standard location, specify the URL with the DOCKER_HOST environment variable.
""" % url)
class FigFileNotFound(UserError):
def __init__(self, filename):
super(FigFileNotFound, self).__init__("""
Can't find %s. Are you in the right directory?
""" % filename)
| apache-2.0 |
Biktorgj/Tizen_b2_Kernel | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by comm/pid.
# If a [comm] or [pid] arg is specified, only syscalls made by that
# process are displayed.
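# Example (hypothetical) session and output:
#
#   $ perf script -s syscall-counts-by-pid.py bash
#
#   comm [pid]/syscalls                          count
#   ----------------------------------------   ----------
#
#   bash [1234]
#     read                                          42
#     write                                         17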
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n"
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
	except ValueError:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
	if (for_comm and common_comm != for_comm) or \
	   (for_pid and common_pid != for_pid):
return
try:
syscalls[common_comm][common_pid][id] += 1
except TypeError:
syscalls[common_comm][common_pid][id] = 1
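# Note: syscalls is an auto-vivifying nested dict (autodict from perf's
# Core module): reading a missing [comm][pid][id] path creates an empty
# autodict, so "+= 1" on it raises TypeError, which the handler above
# catches to initialize the counter to 1.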
def print_syscall_totals():
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events by comm/pid:\n\n",
print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id, val in sorted(syscalls[comm][pid].iteritems(), \
key = lambda(k, v): (v, k), reverse = True):
print " %-38s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
glwu/python-for-android | python3-alpha/python3-src/Tools/msi/schema.py | 88 | 83800 | from msilib import Table
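# Each add_field() type value is a bit mask encoding the MSI column
# definition (see the constants in msilib): low byte = field size,
# 0x0100 = valid, 0x0400 = short integer, 0x0C00 = string, 0x0800 = binary,
# 0x1000 = nullable, 0x2000 = primary key.
# For example, 11552 == 0x2D20: a required string key column of width 32.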
_Validation = Table('_Validation')
_Validation.add_field(1,'Table',11552)
_Validation.add_field(2,'Column',11552)
_Validation.add_field(3,'Nullable',3332)
_Validation.add_field(4,'MinValue',4356)
_Validation.add_field(5,'MaxValue',4356)
_Validation.add_field(6,'KeyTable',7679)
_Validation.add_field(7,'KeyColumn',5378)
_Validation.add_field(8,'Category',7456)
_Validation.add_field(9,'Set',7679)
_Validation.add_field(10,'Description',7679)
ActionText = Table('ActionText')
ActionText.add_field(1,'Action',11592)
ActionText.add_field(2,'Description',7936)
ActionText.add_field(3,'Template',7936)
AdminExecuteSequence = Table('AdminExecuteSequence')
AdminExecuteSequence.add_field(1,'Action',0x2DFF)
AdminExecuteSequence.add_field(2,'Condition',7679)
AdminExecuteSequence.add_field(3,'Sequence',5378)
Condition = Table('Condition')
Condition.add_field(1,'Feature_',11558)
Condition.add_field(2,'Level',9474)
Condition.add_field(3,'Condition',7679)
AdminUISequence = Table('AdminUISequence')
AdminUISequence.add_field(1,'Action',0x2DFF)
AdminUISequence.add_field(2,'Condition',7679)
AdminUISequence.add_field(3,'Sequence',5378)
AdvtExecuteSequence = Table('AdvtExecuteSequence')
AdvtExecuteSequence.add_field(1,'Action',0x2DFF)
AdvtExecuteSequence.add_field(2,'Condition',7679)
AdvtExecuteSequence.add_field(3,'Sequence',5378)
AdvtUISequence = Table('AdvtUISequence')
AdvtUISequence.add_field(1,'Action',11592)
AdvtUISequence.add_field(2,'Condition',7679)
AdvtUISequence.add_field(3,'Sequence',5378)
AppId = Table('AppId')
AppId.add_field(1,'AppId',11558)
AppId.add_field(2,'RemoteServerName',7679)
AppId.add_field(3,'LocalService',7679)
AppId.add_field(4,'ServiceParameters',7679)
AppId.add_field(5,'DllSurrogate',7679)
AppId.add_field(6,'ActivateAtStorage',5378)
AppId.add_field(7,'RunAsInteractiveUser',5378)
AppSearch = Table('AppSearch')
AppSearch.add_field(1,'Property',11592)
AppSearch.add_field(2,'Signature_',11592)
Property = Table('Property')
Property.add_field(1,'Property',11592)
Property.add_field(2,'Value',3840)
BBControl = Table('BBControl')
BBControl.add_field(1,'Billboard_',11570)
BBControl.add_field(2,'BBControl',11570)
BBControl.add_field(3,'Type',3378)
BBControl.add_field(4,'X',1282)
BBControl.add_field(5,'Y',1282)
BBControl.add_field(6,'Width',1282)
BBControl.add_field(7,'Height',1282)
BBControl.add_field(8,'Attributes',4356)
BBControl.add_field(9,'Text',7986)
Billboard = Table('Billboard')
Billboard.add_field(1,'Billboard',11570)
Billboard.add_field(2,'Feature_',3366)
Billboard.add_field(3,'Action',7474)
Billboard.add_field(4,'Ordering',5378)
Feature = Table('Feature')
Feature.add_field(1,'Feature',11558)
Feature.add_field(2,'Feature_Parent',7462)
Feature.add_field(3,'Title',8000)
Feature.add_field(4,'Description',8191)
Feature.add_field(5,'Display',5378)
Feature.add_field(6,'Level',1282)
Feature.add_field(7,'Directory_',0x1DFF)
Feature.add_field(8,'Attributes',1282)
Binary = Table('Binary')
Binary.add_field(1,'Name',11592)
Binary.add_field(2,'Data',2304)
BindImage = Table('BindImage')
BindImage.add_field(1,'File_',0x2DFF)
BindImage.add_field(2,'Path',7679)
File = Table('File')
File.add_field(1,'File',0x2DFF)
File.add_field(2,'Component_',0xDFF)
File.add_field(3,'FileName',4095)
File.add_field(4,'FileSize',260)
File.add_field(5,'Version',0x1DFF)
File.add_field(6,'Language',7444)
File.add_field(7,'Attributes',5378)
File.add_field(8,'Sequence',1282)
CCPSearch = Table('CCPSearch')
CCPSearch.add_field(1,'Signature_',11592)
CheckBox = Table('CheckBox')
CheckBox.add_field(1,'Property',11592)
CheckBox.add_field(2,'Value',7488)
Class = Table('Class')
Class.add_field(1,'CLSID',11558)
Class.add_field(2,'Context',11552)
Class.add_field(3,'Component_',0x2DFF)
Class.add_field(4,'ProgId_Default',7679)
Class.add_field(5,'Description',8191)
Class.add_field(6,'AppId_',7462)
Class.add_field(7,'FileTypeMask',7679)
Class.add_field(8,'Icon_',7496)
Class.add_field(9,'IconIndex',5378)
Class.add_field(10,'DefInprocHandler',7456)
Class.add_field(11,'Argument',7679)
Class.add_field(12,'Feature_',3366)
Class.add_field(13,'Attributes',5378)
Component = Table('Component')
Component.add_field(1,'Component',0x2DFF)
Component.add_field(2,'ComponentId',7462)
Component.add_field(3,'Directory_',0xDFF)
Component.add_field(4,'Attributes',1282)
Component.add_field(5,'Condition',7679)
Component.add_field(6,'KeyPath',0x1DFF)
Icon = Table('Icon')
Icon.add_field(1,'Name',11592)
Icon.add_field(2,'Data',2304)
ProgId = Table('ProgId')
ProgId.add_field(1,'ProgId',11775)
ProgId.add_field(2,'ProgId_Parent',7679)
ProgId.add_field(3,'Class_',7462)
ProgId.add_field(4,'Description',8191)
ProgId.add_field(5,'Icon_',7496)
ProgId.add_field(6,'IconIndex',5378)
ComboBox = Table('ComboBox')
ComboBox.add_field(1,'Property',11592)
ComboBox.add_field(2,'Order',9474)
ComboBox.add_field(3,'Value',3392)
ComboBox.add_field(4,'Text',8000)
CompLocator = Table('CompLocator')
CompLocator.add_field(1,'Signature_',11592)
CompLocator.add_field(2,'ComponentId',3366)
CompLocator.add_field(3,'Type',5378)
Complus = Table('Complus')
Complus.add_field(1,'Component_',0x2DFF)
Complus.add_field(2,'ExpType',13570)
Directory = Table('Directory')
Directory.add_field(1,'Directory',0x2DFF)
Directory.add_field(2,'Directory_Parent',0x1DFF)
Directory.add_field(3,'DefaultDir',4095)
Control = Table('Control')
Control.add_field(1,'Dialog_',11592)
Control.add_field(2,'Control',11570)
Control.add_field(3,'Type',3348)
Control.add_field(4,'X',1282)
Control.add_field(5,'Y',1282)
Control.add_field(6,'Width',1282)
Control.add_field(7,'Height',1282)
Control.add_field(8,'Attributes',4356)
Control.add_field(9,'Property',7474)
Control.add_field(10,'Text',7936)
Control.add_field(11,'Control_Next',7474)
Control.add_field(12,'Help',7986)
Dialog = Table('Dialog')
Dialog.add_field(1,'Dialog',11592)
Dialog.add_field(2,'HCentering',1282)
Dialog.add_field(3,'VCentering',1282)
Dialog.add_field(4,'Width',1282)
Dialog.add_field(5,'Height',1282)
Dialog.add_field(6,'Attributes',4356)
Dialog.add_field(7,'Title',8064)
Dialog.add_field(8,'Control_First',3378)
Dialog.add_field(9,'Control_Default',7474)
Dialog.add_field(10,'Control_Cancel',7474)
ControlCondition = Table('ControlCondition')
ControlCondition.add_field(1,'Dialog_',11592)
ControlCondition.add_field(2,'Control_',11570)
ControlCondition.add_field(3,'Action',11570)
ControlCondition.add_field(4,'Condition',11775)
ControlEvent = Table('ControlEvent')
ControlEvent.add_field(1,'Dialog_',11592)
ControlEvent.add_field(2,'Control_',11570)
ControlEvent.add_field(3,'Event',11570)
ControlEvent.add_field(4,'Argument',11775)
ControlEvent.add_field(5,'Condition',15871)
ControlEvent.add_field(6,'Ordering',5378)
CreateFolder = Table('CreateFolder')
CreateFolder.add_field(1,'Directory_',0x2DFF)
CreateFolder.add_field(2,'Component_',0x2DFF)
CustomAction = Table('CustomAction')
CustomAction.add_field(1,'Action',0x2DFF)
CustomAction.add_field(2,'Type',1282)
CustomAction.add_field(3,'Source',0x1DFF)
CustomAction.add_field(4,'Target',7679)
DrLocator = Table('DrLocator')
DrLocator.add_field(1,'Signature_',11592)
DrLocator.add_field(2,'Parent',15688)
DrLocator.add_field(3,'Path',15871)
DrLocator.add_field(4,'Depth',5378)
DuplicateFile = Table('DuplicateFile')
DuplicateFile.add_field(1,'FileKey',11592)
DuplicateFile.add_field(2,'Component_',0xDFF)
DuplicateFile.add_field(3,'File_',0xDFF)
DuplicateFile.add_field(4,'DestName',8191)
DuplicateFile.add_field(5,'DestFolder',7496)
Environment = Table('Environment')
Environment.add_field(1,'Environment',11592)
Environment.add_field(2,'Name',4095)
Environment.add_field(3,'Value',8191)
Environment.add_field(4,'Component_',0xDFF)
Error = Table('Error')
Error.add_field(1,'Error',9474)
Error.add_field(2,'Message',7936)
EventMapping = Table('EventMapping')
EventMapping.add_field(1,'Dialog_',11592)
EventMapping.add_field(2,'Control_',11570)
EventMapping.add_field(3,'Event',11570)
EventMapping.add_field(4,'Attribute',3378)
Extension = Table('Extension')
Extension.add_field(1,'Extension',11775)
Extension.add_field(2,'Component_',0x2DFF)
Extension.add_field(3,'ProgId_',7679)
Extension.add_field(4,'MIME_',7488)
Extension.add_field(5,'Feature_',3366)
MIME = Table('MIME')
MIME.add_field(1,'ContentType',11584)
MIME.add_field(2,'Extension_',3583)
MIME.add_field(3,'CLSID',7462)
FeatureComponents = Table('FeatureComponents')
FeatureComponents.add_field(1,'Feature_',11558)
FeatureComponents.add_field(2,'Component_',0x2DFF)
FileSFPCatalog = Table('FileSFPCatalog')
FileSFPCatalog.add_field(1,'File_',0x2DFF)
FileSFPCatalog.add_field(2,'SFPCatalog_',11775)
SFPCatalog = Table('SFPCatalog')
SFPCatalog.add_field(1,'SFPCatalog',11775)
SFPCatalog.add_field(2,'Catalog',2304)
SFPCatalog.add_field(3,'Dependency',7424)
Font = Table('Font')
Font.add_field(1,'File_',0x2DFF)
Font.add_field(2,'FontTitle',7552)
IniFile = Table('IniFile')
IniFile.add_field(1,'IniFile',11592)
IniFile.add_field(2,'FileName',4095)
IniFile.add_field(3,'DirProperty',7496)
IniFile.add_field(4,'Section',3936)
IniFile.add_field(5,'Key',3968)
IniFile.add_field(6,'Value',4095)
IniFile.add_field(7,'Action',1282)
IniFile.add_field(8,'Component_',0xDFF)
IniLocator = Table('IniLocator')
IniLocator.add_field(1,'Signature_',11592)
IniLocator.add_field(2,'FileName',3583)
IniLocator.add_field(3,'Section',3424)
IniLocator.add_field(4,'Key',3456)
IniLocator.add_field(5,'Field',5378)
IniLocator.add_field(6,'Type',5378)
InstallExecuteSequence = Table('InstallExecuteSequence')
InstallExecuteSequence.add_field(1,'Action',0x2DFF)
InstallExecuteSequence.add_field(2,'Condition',7679)
InstallExecuteSequence.add_field(3,'Sequence',5378)
InstallUISequence = Table('InstallUISequence')
InstallUISequence.add_field(1,'Action',0x2DFF)
InstallUISequence.add_field(2,'Condition',7679)
InstallUISequence.add_field(3,'Sequence',5378)
IsolatedComponent = Table('IsolatedComponent')
IsolatedComponent.add_field(1,'Component_Shared',0x2DFF)
IsolatedComponent.add_field(2,'Component_Application',0x2DFF)
LaunchCondition = Table('LaunchCondition')
LaunchCondition.add_field(1,'Condition',11775)
LaunchCondition.add_field(2,'Description',4095)
ListBox = Table('ListBox')
ListBox.add_field(1,'Property',11592)
ListBox.add_field(2,'Order',9474)
ListBox.add_field(3,'Value',3392)
ListBox.add_field(4,'Text',8000)
ListView = Table('ListView')
ListView.add_field(1,'Property',11592)
ListView.add_field(2,'Order',9474)
ListView.add_field(3,'Value',3392)
ListView.add_field(4,'Text',8000)
ListView.add_field(5,'Binary_',7496)
LockPermissions = Table('LockPermissions')
LockPermissions.add_field(1,'LockObject',11592)
LockPermissions.add_field(2,'Table',11552)
LockPermissions.add_field(3,'Domain',15871)
LockPermissions.add_field(4,'User',11775)
LockPermissions.add_field(5,'Permission',4356)
Media = Table('Media')
Media.add_field(1,'DiskId',9474)
Media.add_field(2,'LastSequence',1282)
Media.add_field(3,'DiskPrompt',8000)
Media.add_field(4,'Cabinet',7679)
Media.add_field(5,'VolumeLabel',7456)
Media.add_field(6,'Source',7496)
MoveFile = Table('MoveFile')
MoveFile.add_field(1,'FileKey',11592)
MoveFile.add_field(2,'Component_',0xDFF)
MoveFile.add_field(3,'SourceName',8191)
MoveFile.add_field(4,'DestName',8191)
MoveFile.add_field(5,'SourceFolder',7496)
MoveFile.add_field(6,'DestFolder',3400)
MoveFile.add_field(7,'Options',1282)
MsiAssembly = Table('MsiAssembly')
MsiAssembly.add_field(1,'Component_',0x2DFF)
MsiAssembly.add_field(2,'Feature_',3366)
MsiAssembly.add_field(3,'File_Manifest',0x1DFF)
MsiAssembly.add_field(4,'File_Application',0x1DFF)
MsiAssembly.add_field(5,'Attributes',5378)
MsiAssemblyName = Table('MsiAssemblyName')
MsiAssemblyName.add_field(1,'Component_',0x2DFF)
MsiAssemblyName.add_field(2,'Name',11775)
MsiAssemblyName.add_field(3,'Value',3583)
MsiDigitalCertificate = Table('MsiDigitalCertificate')
MsiDigitalCertificate.add_field(1,'DigitalCertificate',11592)
MsiDigitalCertificate.add_field(2,'CertData',2304)
MsiDigitalSignature = Table('MsiDigitalSignature')
MsiDigitalSignature.add_field(1,'Table',11552)
MsiDigitalSignature.add_field(2,'SignObject',11592)
MsiDigitalSignature.add_field(3,'DigitalCertificate_',3400)
MsiDigitalSignature.add_field(4,'Hash',6400)
MsiFileHash = Table('MsiFileHash')
MsiFileHash.add_field(1,'File_',0x2DFF)
MsiFileHash.add_field(2,'Options',1282)
MsiFileHash.add_field(3,'HashPart1',260)
MsiFileHash.add_field(4,'HashPart2',260)
MsiFileHash.add_field(5,'HashPart3',260)
MsiFileHash.add_field(6,'HashPart4',260)
MsiPatchHeaders = Table('MsiPatchHeaders')
MsiPatchHeaders.add_field(1,'StreamRef',11558)
MsiPatchHeaders.add_field(2,'Header',2304)
ODBCAttribute = Table('ODBCAttribute')
ODBCAttribute.add_field(1,'Driver_',11592)
ODBCAttribute.add_field(2,'Attribute',11560)
ODBCAttribute.add_field(3,'Value',8191)
ODBCDriver = Table('ODBCDriver')
ODBCDriver.add_field(1,'Driver',11592)
ODBCDriver.add_field(2,'Component_',0xDFF)
ODBCDriver.add_field(3,'Description',3583)
ODBCDriver.add_field(4,'File_',0xDFF)
ODBCDriver.add_field(5,'File_Setup',0x1DFF)
ODBCDataSource = Table('ODBCDataSource')
ODBCDataSource.add_field(1,'DataSource',0x2DFF)
ODBCDataSource.add_field(2,'Component_',0xDFF)
ODBCDataSource.add_field(3,'Description',3583)
ODBCDataSource.add_field(4,'DriverDescription',3583)
ODBCDataSource.add_field(5,'Registration',1282)
ODBCSourceAttribute = Table('ODBCSourceAttribute')
ODBCSourceAttribute.add_field(1,'DataSource_',11592)
ODBCSourceAttribute.add_field(2,'Attribute',11552)
ODBCSourceAttribute.add_field(3,'Value',8191)
ODBCTranslator = Table('ODBCTranslator')
ODBCTranslator.add_field(1,'Translator',11592)
ODBCTranslator.add_field(2,'Component_',0xDFF)
ODBCTranslator.add_field(3,'Description',3583)
ODBCTranslator.add_field(4,'File_',0xDFF)
ODBCTranslator.add_field(5,'File_Setup',0x1DFF)
Patch = Table('Patch')
Patch.add_field(1,'File_',11592)
Patch.add_field(2,'Sequence',9474)
Patch.add_field(3,'PatchSize',260)
Patch.add_field(4,'Attributes',1282)
Patch.add_field(5,'Header',6400)
Patch.add_field(6,'StreamRef_',7462)
PatchPackage = Table('PatchPackage')
PatchPackage.add_field(1,'PatchId',11558)
PatchPackage.add_field(2,'Media_',1282)
PublishComponent = Table('PublishComponent')
PublishComponent.add_field(1,'ComponentId',11558)
PublishComponent.add_field(2,'Qualifier',11775)
PublishComponent.add_field(3,'Component_',0x2DFF)
PublishComponent.add_field(4,'AppData',8191)
PublishComponent.add_field(5,'Feature_',3366)
RadioButton = Table('RadioButton')
RadioButton.add_field(1,'Property',11592)
RadioButton.add_field(2,'Order',9474)
RadioButton.add_field(3,'Value',3392)
RadioButton.add_field(4,'X',1282)
RadioButton.add_field(5,'Y',1282)
RadioButton.add_field(6,'Width',1282)
RadioButton.add_field(7,'Height',1282)
RadioButton.add_field(8,'Text',8000)
RadioButton.add_field(9,'Help',7986)
Registry = Table('Registry')
Registry.add_field(1,'Registry',0x2DFF)
Registry.add_field(2,'Root',1282)
Registry.add_field(3,'Key',4095)
Registry.add_field(4,'Name',8191)
Registry.add_field(5,'Value',7936)
Registry.add_field(6,'Component_',0xDFF)
RegLocator = Table('RegLocator')
RegLocator.add_field(1,'Signature_',11592)
RegLocator.add_field(2,'Root',1282)
RegLocator.add_field(3,'Key',3583)
RegLocator.add_field(4,'Name',7679)
RegLocator.add_field(5,'Type',5378)
RemoveFile = Table('RemoveFile')
RemoveFile.add_field(1,'FileKey',11592)
RemoveFile.add_field(2,'Component_',0xDFF)
RemoveFile.add_field(3,'FileName',8191)
RemoveFile.add_field(4,'DirProperty',3400)
RemoveFile.add_field(5,'InstallMode',1282)
RemoveIniFile = Table('RemoveIniFile')
RemoveIniFile.add_field(1,'RemoveIniFile',11592)
RemoveIniFile.add_field(2,'FileName',4095)
RemoveIniFile.add_field(3,'DirProperty',7496)
RemoveIniFile.add_field(4,'Section',3936)
RemoveIniFile.add_field(5,'Key',3968)
RemoveIniFile.add_field(6,'Value',8191)
RemoveIniFile.add_field(7,'Action',1282)
RemoveIniFile.add_field(8,'Component_',0xDFF)
RemoveRegistry = Table('RemoveRegistry')
RemoveRegistry.add_field(1,'RemoveRegistry',11592)
RemoveRegistry.add_field(2,'Root',1282)
RemoveRegistry.add_field(3,'Key',4095)
RemoveRegistry.add_field(4,'Name',8191)
RemoveRegistry.add_field(5,'Component_',0xDFF)
ReserveCost = Table('ReserveCost')
ReserveCost.add_field(1,'ReserveKey',11592)
ReserveCost.add_field(2,'Component_',0xDFF)
ReserveCost.add_field(3,'ReserveFolder',7496)
ReserveCost.add_field(4,'ReserveLocal',260)
ReserveCost.add_field(5,'ReserveSource',260)
SelfReg = Table('SelfReg')
SelfReg.add_field(1,'File_',0x2DFF)
SelfReg.add_field(2,'Cost',5378)
ServiceControl = Table('ServiceControl')
ServiceControl.add_field(1,'ServiceControl',11592)
ServiceControl.add_field(2,'Name',4095)
ServiceControl.add_field(3,'Event',1282)
ServiceControl.add_field(4,'Arguments',8191)
ServiceControl.add_field(5,'Wait',5378)
ServiceControl.add_field(6,'Component_',0xDFF)
ServiceInstall = Table('ServiceInstall')
ServiceInstall.add_field(1,'ServiceInstall',11592)
ServiceInstall.add_field(2,'Name',3583)
ServiceInstall.add_field(3,'DisplayName',8191)
ServiceInstall.add_field(4,'ServiceType',260)
ServiceInstall.add_field(5,'StartType',260)
ServiceInstall.add_field(6,'ErrorControl',260)
ServiceInstall.add_field(7,'LoadOrderGroup',7679)
ServiceInstall.add_field(8,'Dependencies',7679)
ServiceInstall.add_field(9,'StartName',7679)
ServiceInstall.add_field(10,'Password',7679)
ServiceInstall.add_field(11,'Arguments',7679)
ServiceInstall.add_field(12,'Component_',0xDFF)
ServiceInstall.add_field(13,'Description',8191)
Shortcut = Table('Shortcut')
Shortcut.add_field(1,'Shortcut',11592)
Shortcut.add_field(2,'Directory_',0xDFF)
Shortcut.add_field(3,'Name',3968)
Shortcut.add_field(4,'Component_',0xDFF)
Shortcut.add_field(5,'Target',3400)
Shortcut.add_field(6,'Arguments',7679)
Shortcut.add_field(7,'Description',8191)
Shortcut.add_field(8,'Hotkey',5378)
Shortcut.add_field(9,'Icon_',7496)
Shortcut.add_field(10,'IconIndex',5378)
Shortcut.add_field(11,'ShowCmd',5378)
Shortcut.add_field(12,'WkDir',7496)
Signature = Table('Signature')
Signature.add_field(1,'Signature',11592)
Signature.add_field(2,'FileName',3583)
Signature.add_field(3,'MinVersion',7444)
Signature.add_field(4,'MaxVersion',7444)
Signature.add_field(5,'MinSize',4356)
Signature.add_field(6,'MaxSize',4356)
Signature.add_field(7,'MinDate',4356)
Signature.add_field(8,'MaxDate',4356)
Signature.add_field(9,'Languages',7679)
TextStyle = Table('TextStyle')
TextStyle.add_field(1,'TextStyle',11592)
TextStyle.add_field(2,'FaceName',3360)
TextStyle.add_field(3,'Size',1282)
TextStyle.add_field(4,'Color',4356)
TextStyle.add_field(5,'StyleBits',5378)
TypeLib = Table('TypeLib')
TypeLib.add_field(1,'LibID',11558)
TypeLib.add_field(2,'Language',9474)
TypeLib.add_field(3,'Component_',0x2DFF)
TypeLib.add_field(4,'Version',4356)
TypeLib.add_field(5,'Description',8064)
TypeLib.add_field(6,'Directory_',0x1DFF)
TypeLib.add_field(7,'Feature_',3366)
TypeLib.add_field(8,'Cost',4356)
UIText = Table('UIText')
UIText.add_field(1,'Key',11592)
UIText.add_field(2,'Text',8191)
Upgrade = Table('Upgrade')
Upgrade.add_field(1,'UpgradeCode',11558)
Upgrade.add_field(2,'VersionMin',15636)
Upgrade.add_field(3,'VersionMax',15636)
Upgrade.add_field(4,'Language',15871)
Upgrade.add_field(5,'Attributes',8452)
Upgrade.add_field(6,'Remove',7679)
Upgrade.add_field(7,'ActionProperty',3400)
Verb = Table('Verb')
Verb.add_field(1,'Extension_',11775)
Verb.add_field(2,'Verb',11552)
Verb.add_field(3,'Sequence',5378)
Verb.add_field(4,'Command',8191)
Verb.add_field(5,'Argument',8191)
tables=[_Validation, ActionText, AdminExecuteSequence, Condition, AdminUISequence, AdvtExecuteSequence, AdvtUISequence, AppId, AppSearch, Property, BBControl, Billboard, Feature, Binary, BindImage, File, CCPSearch, CheckBox, Class, Component, Icon, ProgId, ComboBox, CompLocator, Complus, Directory, Control, Dialog, ControlCondition, ControlEvent, CreateFolder, CustomAction, DrLocator, DuplicateFile, Environment, Error, EventMapping, Extension, MIME, FeatureComponents, FileSFPCatalog, SFPCatalog, Font, IniFile, IniLocator, InstallExecuteSequence, InstallUISequence, IsolatedComponent, LaunchCondition, ListBox, ListView, LockPermissions, Media, MoveFile, MsiAssembly, MsiAssemblyName, MsiDigitalCertificate, MsiDigitalSignature, MsiFileHash, MsiPatchHeaders, ODBCAttribute, ODBCDriver, ODBCDataSource, ODBCSourceAttribute, ODBCTranslator, Patch, PatchPackage, PublishComponent, RadioButton, Registry, RegLocator, RemoveFile, RemoveIniFile, RemoveRegistry, ReserveCost, SelfReg, ServiceControl, ServiceInstall, Shortcut, Signature, TextStyle, TypeLib, UIText, Upgrade, Verb]
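# Each record below follows the _Validation column order defined above:
# (Table, Column, Nullable, MinValue, MaxValue, KeyTable, KeyColumn,
#  Category, Set, Description).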
_Validation_records = [
(u'_Validation',u'Table',u'N',None, None, None, None, u'Identifier',None, u'Name of table',),
(u'_Validation',u'Column',u'N',None, None, None, None, u'Identifier',None, u'Name of column',),
(u'_Validation',u'Description',u'Y',None, None, None, None, u'Text',None, u'Description of column',),
(u'_Validation',u'Set',u'Y',None, None, None, None, u'Text',None, u'Set of values that are permitted',),
(u'_Validation',u'Category',u'Y',None, None, None, None, None, u'Text;Formatted;Template;Condition;Guid;Path;Version;Language;Identifier;Binary;UpperCase;LowerCase;Filename;Paths;AnyPath;WildCardFilename;RegPath;KeyFormatted;CustomSource;Property;Cabinet;Shortcut;URL',u'String category',),
(u'_Validation',u'KeyColumn',u'Y',1,32,None, None, None, None, u'Column to which foreign key connects',),
(u'_Validation',u'KeyTable',u'Y',None, None, None, None, u'Identifier',None, u'For foreign key, Name of table to which data must link',),
(u'_Validation',u'MaxValue',u'Y',-2147483647,2147483647,None, None, None, None, u'Maximum value allowed',),
(u'_Validation',u'MinValue',u'Y',-2147483647,2147483647,None, None, None, None, u'Minimum value allowed',),
(u'_Validation',u'Nullable',u'N',None, None, None, None, None, u'Y;N;@',u'Whether the column is nullable',),
(u'ActionText',u'Description',u'Y',None, None, None, None, u'Text',None, u'Localized description displayed in progress dialog and log when action is executing.',),
(u'ActionText',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to be described.',),
(u'ActionText',u'Template',u'Y',None, None, None, None, u'Template',None, u'Optional localized format template used to format action data records for display during action execution.',),
(u'AdminExecuteSequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'AdminExecuteSequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'AdminExecuteSequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'Condition',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Expression evaluated to determine if Level in the Feature table is to change.',),
(u'Condition',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Reference to a Feature entry in Feature table.',),
(u'Condition',u'Level',u'N',0,32767,None, None, None, None, u'New selection Level to set in Feature table if Condition evaluates to TRUE.',),
(u'AdminUISequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'AdminUISequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'AdminUISequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'AdvtExecuteSequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'AdvtExecuteSequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'AdvtExecuteSequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'AdvtUISequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'AdvtUISequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'AdvtUISequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'AppId',u'AppId',u'N',None, None, None, None, u'Guid',None, None, ),
(u'AppId',u'ActivateAtStorage',u'Y',0,1,None, None, None, None, None, ),
(u'AppId',u'DllSurrogate',u'Y',None, None, None, None, u'Text',None, None, ),
(u'AppId',u'LocalService',u'Y',None, None, None, None, u'Text',None, None, ),
(u'AppId',u'RemoteServerName',u'Y',None, None, None, None, u'Formatted',None, None, ),
(u'AppId',u'RunAsInteractiveUser',u'Y',0,1,None, None, None, None, None, ),
(u'AppId',u'ServiceParameters',u'Y',None, None, None, None, u'Text',None, None, ),
(u'AppSearch',u'Property',u'N',None, None, None, None, u'Identifier',None, u'The property associated with a Signature',),
(u'AppSearch',u'Signature_',u'N',None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,u'Identifier',None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
(u'Property',u'Property',u'N',None, None, None, None, u'Identifier',None, u'Name of property, uppercase if settable by launcher or loader.',),
(u'Property',u'Value',u'N',None, None, None, None, u'Text',None, u'String value for property. Never null or empty.',),
(u'BBControl',u'Type',u'N',None, None, None, None, u'Identifier',None, u'The type of the control.',),
(u'BBControl',u'Y',u'N',0,32767,None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
(u'BBControl',u'Text',u'Y',None, None, None, None, u'Text',None, u'A string used to set the initial text contained within a control (if appropriate).',),
(u'BBControl',u'BBControl',u'N',None, None, None, None, u'Identifier',None, u'Name of the control. This name must be unique within a billboard, but can repeat on different billboard.',),
(u'BBControl',u'Attributes',u'Y',0,2147483647,None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.',),
(u'BBControl',u'Billboard_',u'N',None, None, u'Billboard',1,u'Identifier',None, u'External key to the Billboard table, name of the billboard.',),
(u'BBControl',u'Height',u'N',0,32767,None, None, None, None, u'Height of the bounding rectangle of the control.',),
(u'BBControl',u'Width',u'N',0,32767,None, None, None, None, u'Width of the bounding rectangle of the control.',),
(u'BBControl',u'X',u'N',0,32767,None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
(u'Billboard',u'Action',u'Y',None, None, None, None, u'Identifier',None, u'The name of an action. The billboard is displayed during the progress messages received from this action.',),
(u'Billboard',u'Billboard',u'N',None, None, None, None, u'Identifier',None, u'Name of the billboard.',),
(u'Billboard',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'An external key to the Feature Table. The billboard is shown only if this feature is being installed.',),
(u'Billboard',u'Ordering',u'Y',0,32767,None, None, None, None, u'A positive integer. If there is more than one billboard corresponding to an action they will be shown in the order defined by this column.',),
(u'Feature',u'Description',u'Y',None, None, None, None, u'Text',None, u'Longer descriptive text describing a visible feature item.',),
(u'Feature',u'Attributes',u'N',None, None, None, None, None, u'0;1;2;4;5;6;8;9;10;16;17;18;20;21;22;24;25;26;32;33;34;36;37;38;48;49;50;52;53;54',u'Feature attributes',),
(u'Feature',u'Feature',u'N',None, None, None, None, u'Identifier',None, u'Primary key used to identify a particular feature record.',),
(u'Feature',u'Directory_',u'Y',None, None, u'Directory',1,u'UpperCase',None, u'The name of the Directory that can be configured by the UI. A non-null value will enable the browse button.',),
(u'Feature',u'Level',u'N',0,32767,None, None, None, None, u'The install level at which record will be initially selected. An install level of 0 will disable an item and prevent its display.',),
(u'Feature',u'Title',u'Y',None, None, None, None, u'Text',None, u'Short text identifying a visible feature item.',),
(u'Feature',u'Display',u'Y',0,32767,None, None, None, None, u'Numeric sort order, used to force a specific display ordering.',),
(u'Feature',u'Feature_Parent',u'Y',None, None, u'Feature',1,u'Identifier',None, u'Optional key of a parent record in the same table. If the parent is not selected, then the record will not be installed. Null indicates a root item.',),
(u'Binary',u'Name',u'N',None, None, None, None, u'Identifier',None, u'Unique key identifying the binary data.',),
(u'Binary',u'Data',u'N',None, None, None, None, u'Binary',None, u'The unformatted binary data.',),
(u'BindImage',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'The index into the File table. This must be an executable file.',),
(u'BindImage',u'Path',u'Y',None, None, None, None, u'Paths',None, u'A list of ; delimited paths that represent the paths to be searched for the import DLLS. The list is usually a list of properties each enclosed within square brackets [] .',),
(u'File',u'Sequence',u'N',1,32767,None, None, None, None, u'Sequence with respect to the media images; order must track cabinet order.',),
(u'File',u'Attributes',u'Y',0,32767,None, None, None, None, u'Integer containing bit flags representing file attributes (with the decimal value of each bit position in parentheses)',),
(u'File',u'File',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token, must match identifier in cabinet. For uncompressed files, this field is ignored.',),
(u'File',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key referencing Component that controls the file.',),
(u'File',u'FileName',u'N',None, None, None, None, u'Filename',None, u'File name used for installation, may be localized. This may contain a "short name|long name" pair.',),
(u'File',u'FileSize',u'N',0,2147483647,None, None, None, None, u'Size of file in bytes (long integer).',),
(u'File',u'Language',u'Y',None, None, None, None, u'Language',None, u'List of decimal language Ids, comma-separated if more than one.',),
(u'File',u'Version',u'Y',None, None, u'File',1,u'Version',None, u'Version string for versioned files; Blank for unversioned files.',),
(u'CCPSearch',u'Signature_',u'N',None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,u'Identifier',None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
(u'CheckBox',u'Property',u'N',None, None, None, None, u'Identifier',None, u'A named property to be tied to the item.',),
(u'CheckBox',u'Value',u'Y',None, None, None, None, u'Formatted',None, u'The value string associated with the item.',),
(u'Class',u'Description',u'Y',None, None, None, None, u'Text',None, u'Localized description for the Class.',),
(u'Class',u'Attributes',u'Y',None, 32767,None, None, None, None, u'Class registration attributes.',),
(u'Class',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
(u'Class',u'AppId_',u'Y',None, None, u'AppId',1,u'Guid',None, u'Optional AppID containing DCOM information for associated application (string GUID).',),
(u'Class',u'Argument',u'Y',None, None, None, None, u'Formatted',None, u'Optional argument for LocalServers.',),
(u'Class',u'CLSID',u'N',None, None, None, None, u'Guid',None, u'The CLSID of an OLE factory.',),
(u'Class',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
(u'Class',u'Context',u'N',None, None, None, None, u'Identifier',None, u'The numeric server context for this server. CLSCTX_xxxx',),
(u'Class',u'DefInprocHandler',u'Y',None, None, None, None, u'Filename',u'1;2;3',u'Optional default inproc handler. Only optionally provided if Context=CLSCTX_LOCAL_SERVER. Typically "ole32.dll" or "mapi32.dll"',),
(u'Class',u'FileTypeMask',u'Y',None, None, None, None, u'Text',None, u'Optional string containing information for the HKCR (this CLSID) key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...',),
(u'Class',u'Icon_',u'Y',None, None, u'Icon',1,u'Identifier',None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this CLSID. Will be written under the DefaultIcon key.',),
(u'Class',u'IconIndex',u'Y',-32767,32767,None, None, None, None, u'Optional icon index.',),
(u'Class',u'ProgId_Default',u'Y',None, None, u'ProgId',1,u'Text',None, u'Optional ProgId associated with this CLSID.',),
(u'Component',u'Condition',u'Y',None, None, None, None, u'Condition',None, u"A conditional statement that will disable this component if the specified condition evaluates to the 'True' state. If a component is disabled, it will not be installed, regardless of the 'Action' state associated with the component.",),
(u'Component',u'Attributes',u'N',None, None, None, None, None, None, u'Remote execution option, one of irsEnum',),
(u'Component',u'Component',u'N',None, None, None, None, u'Identifier',None, u'Primary key used to identify a particular component record.',),
(u'Component',u'ComponentId',u'Y',None, None, None, None, u'Guid',None, u'A string GUID unique to this component, version, and language.',),
(u'Component',u'Directory_',u'N',None, None, u'Directory',1,u'Identifier',None, u'Required key of a Directory table record. This is actually a property name whose value contains the actual path, set either by the AppSearch action or with the default setting obtained from the Directory table.',),
(u'Component',u'KeyPath',u'Y',None, None, u'File;Registry;ODBCDataSource',1,u'Identifier',None, u'Either the primary key into the File table, Registry table, or ODBCDataSource table. This extract path is stored when the component is installed, and is used to detect the presence of the component and to return the path to it.',),
(u'Icon',u'Name',u'N',None, None, None, None, u'Identifier',None, u'Primary key. Name of the icon file.',),
(u'Icon',u'Data',u'N',None, None, None, None, u'Binary',None, u'Binary stream. The binary icon data in PE (.DLL or .EXE) or icon (.ICO) format.',),
(u'ProgId',u'Description',u'Y',None, None, None, None, u'Text',None, u'Localized description for the Program identifier.',),
(u'ProgId',u'Icon_',u'Y',None, None, u'Icon',1,u'Identifier',None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this ProgId. Will be written under the DefaultIcon key.',),
(u'ProgId',u'IconIndex',u'Y',-32767,32767,None, None, None, None, u'Optional icon index.',),
(u'ProgId',u'ProgId',u'N',None, None, None, None, u'Text',None, u'The Program Identifier. Primary key.',),
(u'ProgId',u'Class_',u'Y',None, None, u'Class',1,u'Guid',None, u'The CLSID of an OLE factory corresponding to the ProgId.',),
(u'ProgId',u'ProgId_Parent',u'Y',None, None, u'ProgId',1,u'Text',None, u'The Parent Program Identifier. If specified, the ProgId column becomes a version independent prog id.',),
(u'ComboBox',u'Text',u'Y',None, None, None, None, u'Formatted',None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
(u'ComboBox',u'Property',u'N',None, None, None, None, u'Identifier',None, u'A named property to be tied to this item. All the items tied to the same property become part of the same combobox.',),
(u'ComboBox',u'Value',u'N',None, None, None, None, u'Formatted',None, u'The value string associated with this item. Selecting the line will set the associated property to this value.',),
(u'ComboBox',u'Order',u'N',1,32767,None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.',),
(u'CompLocator',u'Type',u'Y',0,1,None, None, None, None, u'A boolean value that determines if the registry value is a filename or a directory location.',),
(u'CompLocator',u'Signature_',u'N',None, None, None, None, u'Identifier',None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
(u'CompLocator',u'ComponentId',u'N',None, None, None, None, u'Guid',None, u'A string GUID unique to this component, version, and language.',),
(u'Complus',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key referencing Component that controls the ComPlus component.',),
(u'Complus',u'ExpType',u'Y',0,32767,None, None, None, None, u'ComPlus component attributes.',),
(u'Directory',u'Directory',u'N',None, None, None, None, u'Identifier',None, u'Unique identifier for directory entry, primary key. If a property by this name is defined, it contains the full path to the directory.',),
(u'Directory',u'DefaultDir',u'N',None, None, None, None, u'DefaultDir',None, u"The default sub-path under parent's path.",),
(u'Directory',u'Directory_Parent',u'Y',None, None, u'Directory',1,u'Identifier',None, u'Reference to the entry in this table specifying the default parent directory. A record parented to itself or with a Null parent represents a root of the install tree.',),
(u'Control',u'Type',u'N',None, None, None, None, u'Identifier',None, u'The type of the control.',),
(u'Control',u'Y',u'N',0,32767,None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
(u'Control',u'Text',u'Y',None, None, None, None, u'Formatted',None, u'A string used to set the initial text contained within a control (if appropriate).',),
(u'Control',u'Property',u'Y',None, None, None, None, u'Identifier',None, u'The name of a defined property to be linked to this control. ',),
(u'Control',u'Attributes',u'Y',0,2147483647,None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.',),
(u'Control',u'Height',u'N',0,32767,None, None, None, None, u'Height of the bounding rectangle of the control.',),
(u'Control',u'Width',u'N',0,32767,None, None, None, None, u'Width of the bounding rectangle of the control.',),
(u'Control',u'X',u'N',0,32767,None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
(u'Control',u'Control',u'N',None, None, None, None, u'Identifier',None, u'Name of the control. This name must be unique within a dialog, but can repeat on different dialogs. ',),
(u'Control',u'Control_Next',u'Y',None, None, u'Control',2,u'Identifier',None, u'The name of another control on the same dialog. This link defines the tab order of the controls. The links have to form one or more cycles!',),
(u'Control',u'Dialog_',u'N',None, None, u'Dialog',1,u'Identifier',None, u'External key to the Dialog table, name of the dialog.',),
(u'Control',u'Help',u'Y',None, None, None, None, u'Text',None, u'The help strings used with the button. The text is optional. ',),
(u'Dialog',u'Attributes',u'Y',0,2147483647,None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this dialog.',),
(u'Dialog',u'Height',u'N',0,32767,None, None, None, None, u'Height of the bounding rectangle of the dialog.',),
(u'Dialog',u'Width',u'N',0,32767,None, None, None, None, u'Width of the bounding rectangle of the dialog.',),
(u'Dialog',u'Dialog',u'N',None, None, None, None, u'Identifier',None, u'Name of the dialog.',),
(u'Dialog',u'Control_Cancel',u'Y',None, None, u'Control',2,u'Identifier',None, u'Defines the cancel control. Hitting escape or clicking on the close icon on the dialog is equivalent to pushing this button.',),
(u'Dialog',u'Control_Default',u'Y',None, None, u'Control',2,u'Identifier',None, u'Defines the default control. Hitting return is equivalent to pushing this button.',),
(u'Dialog',u'Control_First',u'N',None, None, u'Control',2,u'Identifier',None, u'Defines the control that has the focus when the dialog is created.',),
(u'Dialog',u'HCentering',u'N',0,100,None, None, None, None, u'Horizontal position of the dialog on a 0-100 scale. 0 means left end, 100 means right end of the screen, 50 center.',),
(u'Dialog',u'Title',u'Y',None, None, None, None, u'Formatted',None, u"A text string specifying the title to be displayed in the title bar of the dialog's window.",),
(u'Dialog',u'VCentering',u'N',0,100,None, None, None, None, u'Vertical position of the dialog on a 0-100 scale. 0 means top end, 100 means bottom end of the screen, 50 center.',),
(u'ControlCondition',u'Action',u'N',None, None, None, None, None, u'Default;Disable;Enable;Hide;Show',u'The desired action to be taken on the specified control.',),
(u'ControlCondition',u'Condition',u'N',None, None, None, None, u'Condition',None, u'A standard conditional statement that specifies under which conditions the action should be triggered.',),
(u'ControlCondition',u'Dialog_',u'N',None, None, u'Dialog',1,u'Identifier',None, u'A foreign key to the Dialog table, name of the dialog.',),
(u'ControlCondition',u'Control_',u'N',None, None, u'Control',2,u'Identifier',None, u'A foreign key to the Control table, name of the control.',),
(u'ControlEvent',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'A standard conditional statement that specifies under which conditions an event should be triggered.',),
(u'ControlEvent',u'Ordering',u'Y',0,2147483647,None, None, None, None, u'An integer used to order several events tied to the same control. Can be left blank.',),
(u'ControlEvent',u'Argument',u'N',None, None, None, None, u'Formatted',None, u'A value to be used as a modifier when triggering a particular event.',),
(u'ControlEvent',u'Dialog_',u'N',None, None, u'Dialog',1,u'Identifier',None, u'A foreign key to the Dialog table, name of the dialog.',),
(u'ControlEvent',u'Control_',u'N',None, None, u'Control',2,u'Identifier',None, u'A foreign key to the Control table, name of the control',),
(u'ControlEvent',u'Event',u'N',None, None, None, None, u'Formatted',None, u'An identifier that specifies the type of the event that should take place when the user interacts with control specified by the first two entries.',),
(u'CreateFolder',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table.',),
(u'CreateFolder',u'Directory_',u'N',None, None, u'Directory',1,u'Identifier',None, u'Primary key, could be foreign key into the Directory table.',),
(u'CustomAction',u'Type',u'N',1,16383,None, None, None, None, u'The numeric custom action type, consisting of source location, code type, entry, option flags.',),
(u'CustomAction',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Primary key, name of action, normally appears in sequence table unless private use.',),
(u'CustomAction',u'Source',u'Y',None, None, None, None, u'CustomSource',None, u'The table reference of the source of the code.',),
(u'CustomAction',u'Target',u'Y',None, None, None, None, u'Formatted',None, u'Execution parameter, depends on the type of custom action',),
(u'DrLocator',u'Signature_',u'N',None, None, None, None, u'Identifier',None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
(u'DrLocator',u'Path',u'Y',None, None, None, None, u'AnyPath',None, u'The path on the user system. This is a either a subpath below the value of the Parent or a full path. The path may contain properties enclosed within [ ] that will be expanded.',),
(u'DrLocator',u'Depth',u'Y',0,32767,None, None, None, None, u'The depth below the path to which the Signature_ is recursively searched. If absent, the depth is assumed to be 0.',),
(u'DrLocator',u'Parent',u'Y',None, None, None, None, u'Identifier',None, u'The parent file signature. It is also a foreign key in the Signature table. If null and the Path column does not expand to a full path, then all the fixed drives of the user system are searched using the Path.',),
(u'DuplicateFile',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Foreign key referencing the source file to be duplicated.',),
(u'DuplicateFile',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key referencing Component that controls the duplicate file.',),
(u'DuplicateFile',u'DestFolder',u'Y',None, None, None, None, u'Identifier',None, u'Name of a property whose value is assumed to resolve to the full pathname to a destination folder.',),
(u'DuplicateFile',u'DestName',u'Y',None, None, None, None, u'Filename',None, u'Filename to be given to the duplicate file.',),
(u'DuplicateFile',u'FileKey',u'N',None, None, None, None, u'Identifier',None, u'Primary key used to identify a particular file entry',),
(u'Environment',u'Name',u'N',None, None, None, None, u'Text',None, u'The name of the environmental value.',),
(u'Environment',u'Value',u'Y',None, None, None, None, u'Formatted',None, u'The value to set in the environmental settings.',),
(u'Environment',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table referencing component that controls the installing of the environmental value.',),
(u'Environment',u'Environment',u'N',None, None, None, None, u'Identifier',None, u'Unique identifier for the environmental variable setting',),
(u'Error',u'Error',u'N',0,32767,None, None, None, None, u'Integer error number, obtained from header file IError(...) macros.',),
(u'Error',u'Message',u'Y',None, None, None, None, u'Template',None, u'Error formatting template, obtained from user ed. or localizers.',),
(u'EventMapping',u'Dialog_',u'N',None, None, u'Dialog',1,u'Identifier',None, u'A foreign key to the Dialog table, name of the Dialog.',),
(u'EventMapping',u'Control_',u'N',None, None, u'Control',2,u'Identifier',None, u'A foreign key to the Control table, name of the control.',),
(u'EventMapping',u'Event',u'N',None, None, None, None, u'Identifier',None, u'An identifier that specifies the type of the event that the control subscribes to.',),
(u'EventMapping',u'Attribute',u'N',None, None, None, None, u'Identifier',None, u'The name of the control attribute, that is set when this event is received.',),
(u'Extension',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
(u'Extension',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
(u'Extension',u'Extension',u'N',None, None, None, None, u'Text',None, u'The extension associated with the table row.',),
(u'Extension',u'MIME_',u'Y',None, None, u'MIME',1,u'Text',None, u'Optional Context identifier, typically "type/format" associated with the extension',),
(u'Extension',u'ProgId_',u'Y',None, None, u'ProgId',1,u'Text',None, u'Optional ProgId associated with this extension.',),
(u'MIME',u'CLSID',u'Y',None, None, None, None, u'Guid',None, u'Optional associated CLSID.',),
(u'MIME',u'ContentType',u'N',None, None, None, None, u'Text',None, u'Primary key. Context identifier, typically "type/format".',),
(u'MIME',u'Extension_',u'N',None, None, u'Extension',1,u'Text',None, u'Optional associated extension (without dot)',),
(u'FeatureComponents',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Foreign key into Feature table.',),
(u'FeatureComponents',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into Component table.',),
(u'FileSFPCatalog',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'File associated with the catalog',),
(u'FileSFPCatalog',u'SFPCatalog_',u'N',None, None, u'SFPCatalog',1,u'Filename',None, u'Catalog associated with the file',),
(u'SFPCatalog',u'SFPCatalog',u'N',None, None, None, None, u'Filename',None, u'File name for the catalog.',),
(u'SFPCatalog',u'Catalog',u'N',None, None, None, None, u'Binary',None, u'SFP Catalog',),
(u'SFPCatalog',u'Dependency',u'Y',None, None, None, None, u'Formatted',None, u'Parent catalog - only used by SFP',),
(u'Font',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Primary key, foreign key into File table referencing font file.',),
(u'Font',u'FontTitle',u'Y',None, None, None, None, u'Text',None, u'Font name.',),
(u'IniFile',u'Action',u'N',None, None, None, None, None, u'0;1;3',u'The type of modification to be made, one of iifEnum',),
(u'IniFile',u'Value',u'N',None, None, None, None, u'Formatted',None, u'The value to be written.',),
(u'IniFile',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table referencing component that controls the installing of the .INI value.',),
(u'IniFile',u'FileName',u'N',None, None, None, None, u'Filename',None, u'The .INI file name in which to write the information',),
(u'IniFile',u'IniFile',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'IniFile',u'DirProperty',u'Y',None, None, None, None, u'Identifier',None, u'Foreign key into the Directory table denoting the directory where the .INI file is.',),
(u'IniFile',u'Key',u'N',None, None, None, None, u'Formatted',None, u'The .INI file key below Section.',),
(u'IniFile',u'Section',u'N',None, None, None, None, u'Formatted',None, u'The .INI file Section.',),
(u'IniLocator',u'Type',u'Y',0,2,None, None, None, None, u'An integer value that determines if the .INI value read is a filename or a directory location or to be used as is w/o interpretation.',),
(u'IniLocator',u'Signature_',u'N',None, None, None, None, u'Identifier',None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
(u'IniLocator',u'FileName',u'N',None, None, None, None, u'Filename',None, u'The .INI file name.',),
(u'IniLocator',u'Key',u'N',None, None, None, None, u'Text',None, u'Key value (followed by an equals sign in INI file).',),
(u'IniLocator',u'Section',u'N',None, None, None, None, u'Text',None, u'Section name within the .INI file (within square brackets in INI file).',),
(u'IniLocator',u'Field',u'Y',0,32767,None, None, None, None, u'The field in the .INI line. If Field is null or 0 the entire line is read.',),
(u'InstallExecuteSequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'InstallExecuteSequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'InstallExecuteSequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'InstallUISequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'InstallUISequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'InstallUISequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'IsolatedComponent',u'Component_Application',u'N',None, None, u'Component',1,u'Identifier',None, u'Key to Component table item for application',),
(u'IsolatedComponent',u'Component_Shared',u'N',None, None, u'Component',1,u'Identifier',None, u'Key to Component table item to be isolated',),
(u'LaunchCondition',u'Description',u'N',None, None, None, None, u'Formatted',None, u'Localizable text to display when condition fails and install must abort.',),
(u'LaunchCondition',u'Condition',u'N',None, None, None, None, u'Condition',None, u'Expression which must evaluate to TRUE in order for install to commence.',),
(u'ListBox',u'Text',u'Y',None, None, None, None, u'Text',None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
(u'ListBox',u'Property',u'N',None, None, None, None, u'Identifier',None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listbox.',),
(u'ListBox',u'Value',u'N',None, None, None, None, u'Formatted',None, u'The value string associated with this item. Selecting the line will set the associated property to this value.',),
(u'ListBox',u'Order',u'N',1,32767,None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.',),
(u'ListView',u'Text',u'Y',None, None, None, None, u'Text',None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
(u'ListView',u'Property',u'N',None, None, None, None, u'Identifier',None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listview.',),
(u'ListView',u'Value',u'N',None, None, None, None, u'Identifier',None, u'The value string associated with this item. Selecting the line will set the associated property to this value.',),
(u'ListView',u'Order',u'N',1,32767,None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.',),
(u'ListView',u'Binary_',u'Y',None, None, u'Binary',1,u'Identifier',None, u'The name of the icon to be displayed with the icon. The binary information is looked up from the Binary Table.',),
(u'LockPermissions',u'Table',u'N',None, None, None, None, u'Identifier',u'Directory;File;Registry',u'Reference to another table name',),
(u'LockPermissions',u'Domain',u'Y',None, None, None, None, u'Formatted',None, u'Domain name for user whose permissions are being set. (usually a property)',),
(u'LockPermissions',u'LockObject',u'N',None, None, None, None, u'Identifier',None, u'Foreign key into Registry or File table',),
(u'LockPermissions',u'Permission',u'Y',-2147483647,2147483647,None, None, None, None, u'Permission Access mask. Full Control = 268435456 (GENERIC_ALL = 0x10000000)',),
(u'LockPermissions',u'User',u'N',None, None, None, None, u'Formatted',None, u'User for permissions to be set. (usually a property)',),
(u'Media',u'Source',u'Y',None, None, None, None, u'Property',None, u'The property defining the location of the cabinet file.',),
(u'Media',u'Cabinet',u'Y',None, None, None, None, u'Cabinet',None, u'If some or all of the files stored on the media are compressed in a cabinet, the name of that cabinet.',),
(u'Media',u'DiskId',u'N',1,32767,None, None, None, None, u'Primary key, integer to determine sort order for table.',),
(u'Media',u'DiskPrompt',u'Y',None, None, None, None, u'Text',None, u'Disk name: the visible text actually printed on the disk. This will be used to prompt the user when this disk needs to be inserted.',),
(u'Media',u'LastSequence',u'N',0,32767,None, None, None, None, u'File sequence number for the last file for this media.',),
(u'Media',u'VolumeLabel',u'Y',None, None, None, None, u'Text',None, u'The label attributed to the volume.',),
(u'ModuleComponents',u'Component',u'N',None, None, u'Component',1,u'Identifier',None, u'Component contained in the module.',),
(u'ModuleComponents',u'Language',u'N',None, None, u'ModuleSignature',2,None, None, u'Default language ID for module (may be changed by transform).',),
(u'ModuleComponents',u'ModuleID',u'N',None, None, u'ModuleSignature',1,u'Identifier',None, u'Module containing the component.',),
(u'ModuleSignature',u'Language',u'N',None, None, None, None, None, None, u'Default decimal language of module.',),
(u'ModuleSignature',u'Version',u'N',None, None, None, None, u'Version',None, u'Version of the module.',),
(u'ModuleSignature',u'ModuleID',u'N',None, None, None, None, u'Identifier',None, u'Module identifier (String.GUID).',),
(u'ModuleDependency',u'ModuleID',u'N',None, None, u'ModuleSignature',1,u'Identifier',None, u'Module requiring the dependency.',),
(u'ModuleDependency',u'ModuleLanguage',u'N',None, None, u'ModuleSignature',2,None, None, u'Language of module requiring the dependency.',),
(u'ModuleDependency',u'RequiredID',u'N',None, None, None, None, None, None, u'String.GUID of required module.',),
(u'ModuleDependency',u'RequiredLanguage',u'N',None, None, None, None, None, None, u'LanguageID of the required module.',),
(u'ModuleDependency',u'RequiredVersion',u'Y',None, None, None, None, u'Version',None, u'Version of the required module.',),
(u'ModuleExclusion',u'ModuleID',u'N',None, None, u'ModuleSignature',1,u'Identifier',None, u'String.GUID of module with exclusion requirement.',),
(u'ModuleExclusion',u'ModuleLanguage',u'N',None, None, u'ModuleSignature',2,None, None, u'LanguageID of module with exclusion requirement.',),
(u'ModuleExclusion',u'ExcludedID',u'N',None, None, None, None, None, None, u'String.GUID of excluded module.',),
(u'ModuleExclusion',u'ExcludedLanguage',u'N',None, None, None, None, None, None, u'Language of excluded module.',),
(u'ModuleExclusion',u'ExcludedMaxVersion',u'Y',None, None, None, None, u'Version',None, u'Maximum version of excluded module.',),
(u'ModuleExclusion',u'ExcludedMinVersion',u'Y',None, None, None, None, u'Version',None, u'Minimum version of excluded module.',),
(u'MoveFile',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'If this component is not "selected" for installation or removal, no action will be taken on the associated MoveFile entry',),
(u'MoveFile',u'DestFolder',u'N',None, None, None, None, u'Identifier',None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
(u'MoveFile',u'DestName',u'Y',None, None, None, None, u'Filename',None, u'Name to be given to the original file after it is moved or copied. If blank, the destination file will be given the same name as the source file',),
(u'MoveFile',u'FileKey',u'N',None, None, None, None, u'Identifier',None, u'Primary key that uniquely identifies a particular MoveFile record',),
(u'MoveFile',u'Options',u'N',0,1,None, None, None, None, u'Integer value specifying the MoveFile operating mode, one of imfoEnum',),
(u'MoveFile',u'SourceFolder',u'Y',None, None, None, None, u'Identifier',None, u'Name of a property whose value is assumed to resolve to the full path to the source directory',),
(u'MoveFile',u'SourceName',u'Y',None, None, None, None, u'Text',None, u"Name of the source file(s) to be moved or copied. Can contain the '*' or '?' wildcards.",),
(u'MsiAssembly',u'Attributes',u'Y',None, None, None, None, None, None, u'Assembly attributes',),
(u'MsiAssembly',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Foreign key into Feature table.',),
(u'MsiAssembly',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into Component table.',),
(u'MsiAssembly',u'File_Application',u'Y',None, None, u'File',1,u'Identifier',None, u'Foreign key into File table, denoting the application context for private assemblies. Null for global assemblies.',),
(u'MsiAssembly',u'File_Manifest',u'Y',None, None, u'File',1,u'Identifier',None, u'Foreign key into the File table denoting the manifest file for the assembly.',),
(u'MsiAssemblyName',u'Name',u'N',None, None, None, None, u'Text',None, u'The name part of the name-value pairs for the assembly name.',),
(u'MsiAssemblyName',u'Value',u'N',None, None, None, None, u'Text',None, u'The value part of the name-value pairs for the assembly name.',),
(u'MsiAssemblyName',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into Component table.',),
(u'MsiDigitalCertificate',u'CertData',u'N',None, None, None, None, u'Binary',None, u'A certificate context blob for a signer certificate',),
(u'MsiDigitalCertificate',u'DigitalCertificate',u'N',None, None, None, None, u'Identifier',None, u'A unique identifier for the row',),
(u'MsiDigitalSignature',u'Table',u'N',None, None, None, None, None, u'Media',u'Reference to another table name (only Media table is supported)',),
(u'MsiDigitalSignature',u'DigitalCertificate_',u'N',None, None, u'MsiDigitalCertificate',1,u'Identifier',None, u'Foreign key to MsiDigitalCertificate table identifying the signer certificate',),
(u'MsiDigitalSignature',u'Hash',u'Y',None, None, None, None, u'Binary',None, u'The encoded hash blob from the digital signature',),
(u'MsiDigitalSignature',u'SignObject',u'N',None, None, None, None, u'Text',None, u'Foreign key to Media table',),
(u'MsiFileHash',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Primary key, foreign key into File table referencing file with this hash',),
(u'MsiFileHash',u'Options',u'N',0,32767,None, None, None, None, u'Various options and attributes for this hash.',),
(u'MsiFileHash',u'HashPart1',u'N',None, None, None, None, None, None, u'Size of file in bytes (long integer).',),
(u'MsiFileHash',u'HashPart2',u'N',None, None, None, None, None, None, u'Size of file in bytes (long integer).',),
(u'MsiFileHash',u'HashPart3',u'N',None, None, None, None, None, None, u'Size of file in bytes (long integer).',),
(u'MsiFileHash',u'HashPart4',u'N',None, None, None, None, None, None, u'Size of file in bytes (long integer).',),
(u'MsiPatchHeaders',u'StreamRef',u'N',None, None, None, None, u'Identifier',None, u'Primary key. A unique identifier for the row.',),
(u'MsiPatchHeaders',u'Header',u'N',None, None, None, None, u'Binary',None, u'Binary stream. The patch header, used for patch validation.',),
(u'ODBCAttribute',u'Value',u'Y',None, None, None, None, u'Text',None, u'Value for ODBC driver attribute',),
(u'ODBCAttribute',u'Attribute',u'N',None, None, None, None, u'Text',None, u'Name of ODBC driver attribute',),
(u'ODBCAttribute',u'Driver_',u'N',None, None, u'ODBCDriver',1,u'Identifier',None, u'Reference to ODBC driver in ODBCDriver table',),
(u'ODBCDriver',u'Description',u'N',None, None, None, None, u'Text',None, u'Text used as registered name for driver, non-localized',),
(u'ODBCDriver',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Reference to key driver file',),
(u'ODBCDriver',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Reference to associated component',),
(u'ODBCDriver',u'Driver',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized internal token for driver',),
(u'ODBCDriver',u'File_Setup',u'Y',None, None, u'File',1,u'Identifier',None, u'Optional reference to key driver setup DLL',),
(u'ODBCDataSource',u'Description',u'N',None, None, None, None, u'Text',None, u'Text used as registered name for data source',),
(u'ODBCDataSource',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Reference to associated component',),
(u'ODBCDataSource',u'DataSource',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized internal token for data source',),
(u'ODBCDataSource',u'DriverDescription',u'N',None, None, None, None, u'Text',None, u'Reference to driver description, may be existing driver',),
(u'ODBCDataSource',u'Registration',u'N',0,1,None, None, None, None, u'Registration option: 0=machine, 1=user, others t.b.d.',),
(u'ODBCSourceAttribute',u'Value',u'Y',None, None, None, None, u'Text',None, u'Value for ODBC data source attribute',),
(u'ODBCSourceAttribute',u'Attribute',u'N',None, None, None, None, u'Text',None, u'Name of ODBC data source attribute',),
(u'ODBCSourceAttribute',u'DataSource_',u'N',None, None, u'ODBCDataSource',1,u'Identifier',None, u'Reference to ODBC data source in ODBCDataSource table',),
(u'ODBCTranslator',u'Description',u'N',None, None, None, None, u'Text',None, u'Text used as registered name for translator',),
(u'ODBCTranslator',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Reference to key translator file',),
(u'ODBCTranslator',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Reference to associated component',),
(u'ODBCTranslator',u'File_Setup',u'Y',None, None, u'File',1,u'Identifier',None, u'Optional reference to key translator setup DLL',),
(u'ODBCTranslator',u'Translator',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized internal token for translator',),
(u'Patch',u'Sequence',u'N',0,32767,None, None, None, None, u'Primary key, sequence with respect to the media images; order must track cabinet order.',),
(u'Patch',u'Attributes',u'N',0,32767,None, None, None, None, u'Integer containing bit flags representing patch attributes',),
(u'Patch',u'File_',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token, foreign key to File table, must match identifier in cabinet.',),
(u'Patch',u'Header',u'Y',None, None, None, None, u'Binary',None, u'Binary stream. The patch header, used for patch validation.',),
(u'Patch',u'PatchSize',u'N',0,2147483647,None, None, None, None, u'Size of patch in bytes (long integer).',),
(u'Patch',u'StreamRef_',u'Y',None, None, None, None, u'Identifier',None, u'Identifier. Foreign key to the StreamRef column of the MsiPatchHeaders table.',),
(u'PatchPackage',u'Media_',u'N',0,32767,None, None, None, None, u'Foreign key to DiskId column of Media table. Indicates the disk containing the patch package.',),
(u'PatchPackage',u'PatchId',u'N',None, None, None, None, u'Guid',None, u'A unique string GUID representing this patch.',),
(u'PublishComponent',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Foreign key into the Feature table.',),
(u'PublishComponent',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table.',),
(u'PublishComponent',u'ComponentId',u'N',None, None, None, None, u'Guid',None, u'A string GUID that represents the component id that will be requested by the alien product.',),
(u'PublishComponent',u'AppData',u'Y',None, None, None, None, u'Text',None, u'This is localisable Application specific data that can be associated with a Qualified Component.',),
(u'PublishComponent',u'Qualifier',u'N',None, None, None, None, u'Text',None, u'This is defined only when the ComponentId column is a Qualified Component Id. This is the Qualifier for ProvideComponentIndirect.',),
(u'RadioButton',u'Y',u'N',0,32767,None, None, None, None, u'The vertical coordinate of the upper left corner of the bounding rectangle of the radio button.',),
(u'RadioButton',u'Text',u'Y',None, None, None, None, u'Text',None, u'The visible title to be assigned to the radio button.',),
(u'RadioButton',u'Property',u'N',None, None, None, None, u'Identifier',None, u'A named property to be tied to this radio button. All the buttons tied to the same property become part of the same group.',),
(u'RadioButton',u'Height',u'N',0,32767,None, None, None, None, u'The height of the button.',),
(u'RadioButton',u'Width',u'N',0,32767,None, None, None, None, u'The width of the button.',),
(u'RadioButton',u'X',u'N',0,32767,None, None, None, None, u'The horizontal coordinate of the upper left corner of the bounding rectangle of the radio button.',),
(u'RadioButton',u'Value',u'N',None, None, None, None, u'Formatted',None, u'The value string associated with this button. Selecting the button will set the associated property to this value.',),
(u'RadioButton',u'Order',u'N',1,32767,None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.',),
(u'RadioButton',u'Help',u'Y',None, None, None, None, u'Text',None, u'The help strings used with the button. The text is optional.',),
(u'Registry',u'Name',u'Y',None, None, None, None, u'Formatted',None, u'The registry value name.',),
(u'Registry',u'Value',u'Y',None, None, None, None, u'Formatted',None, u'The registry value.',),
(u'Registry',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table referencing component that controls the installing of the registry value.',),
(u'Registry',u'Key',u'N',None, None, None, None, u'RegPath',None, u'The key for the registry value.',),
(u'Registry',u'Registry',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'Registry',u'Root',u'N',-1,3,None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.',),
(u'RegLocator',u'Name',u'Y',None, None, None, None, u'Formatted',None, u'The registry value name.',),
(u'RegLocator',u'Type',u'Y',0,18,None, None, None, None, u'An integer value that determines if the registry value is a filename or a directory location or to be used as is w/o interpretation.',),
(u'RegLocator',u'Signature_',u'N',None, None, None, None, u'Identifier',None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table. If the type is 0, the registry value refers to a directory, and Signature_ is not a foreign key.',),
(u'RegLocator',u'Key',u'N',None, None, None, None, u'RegPath',None, u'The key for the registry value.',),
(u'RegLocator',u'Root',u'N',0,3,None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.',),
(u'RemoveFile',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key referencing Component that controls the file to be removed.',),
(u'RemoveFile',u'FileKey',u'N',None, None, None, None, u'Identifier',None, u'Primary key used to identify a particular file entry',),
(u'RemoveFile',u'FileName',u'Y',None, None, None, None, u'WildCardFilename',None, u'Name of the file to be removed.',),
(u'RemoveFile',u'DirProperty',u'N',None, None, None, None, u'Identifier',None, u'Name of a property whose value is assumed to resolve to the full pathname to the folder of the file to be removed.',),
(u'RemoveFile',u'InstallMode',u'N',None, None, None, None, None, u'1;2;3',u'Installation option, one of iimEnum.',),
(u'RemoveIniFile',u'Action',u'N',None, None, None, None, None, u'2;4',u'The type of modification to be made, one of iifEnum.',),
(u'RemoveIniFile',u'Value',u'Y',None, None, None, None, u'Formatted',None, u'The value to be deleted. The value is required when Action is iifIniRemoveTag',),
(u'RemoveIniFile',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table referencing component that controls the deletion of the .INI value.',),
(u'RemoveIniFile',u'FileName',u'N',None, None, None, None, u'Filename',None, u'The .INI file name in which to delete the information',),
(u'RemoveIniFile',u'DirProperty',u'Y',None, None, None, None, u'Identifier',None, u'Foreign key into the Directory table denoting the directory where the .INI file is.',),
(u'RemoveIniFile',u'Key',u'N',None, None, None, None, u'Formatted',None, u'The .INI file key below Section.',),
(u'RemoveIniFile',u'Section',u'N',None, None, None, None, u'Formatted',None, u'The .INI file Section.',),
(u'RemoveIniFile',u'RemoveIniFile',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'RemoveRegistry',u'Name',u'Y',None, None, None, None, u'Formatted',None, u'The registry value name.',),
(u'RemoveRegistry',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table referencing component that controls the deletion of the registry value.',),
(u'RemoveRegistry',u'Key',u'N',None, None, None, None, u'RegPath',None, u'The key for the registry value.',),
(u'RemoveRegistry',u'Root',u'N',-1,3,None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum',),
(u'RemoveRegistry',u'RemoveRegistry',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'ReserveCost',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Reserve a specified amount of space if this component is to be installed.',),
(u'ReserveCost',u'ReserveFolder',u'Y',None, None, None, None, u'Identifier',None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
(u'ReserveCost',u'ReserveKey',u'N',None, None, None, None, u'Identifier',None, u'Primary key that uniquely identifies a particular ReserveCost record',),
(u'ReserveCost',u'ReserveLocal',u'N',0,2147483647,None, None, None, None, u'Disk space to reserve if linked component is installed locally.',),
(u'ReserveCost',u'ReserveSource',u'N',0,2147483647,None, None, None, None, u'Disk space to reserve if linked component is installed to run from the source location.',),
(u'SelfReg',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Foreign key into the File table denoting the module that needs to be registered.',),
(u'SelfReg',u'Cost',u'Y',0,32767,None, None, None, None, u'The cost of registering the module.',),
(u'ServiceControl',u'Name',u'N',None, None, None, None, u'Formatted',None, u'Name of a service. /, \\, comma and space are invalid',),
(u'ServiceControl',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Required foreign key into the Component Table that controls the startup of the service',),
(u'ServiceControl',u'Event',u'N',0,187,None, None, None, None, u'Bit field: Install: 0x1 = Start, 0x2 = Stop, 0x8 = Delete, Uninstall: 0x10 = Start, 0x20 = Stop, 0x80 = Delete',),
(u'ServiceControl',u'ServiceControl',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'ServiceControl',u'Arguments',u'Y',None, None, None, None, u'Formatted',None, u'Arguments for the service. Separate by [~].',),
(u'ServiceControl',u'Wait',u'Y',0,1,None, None, None, None, u'Boolean for whether to wait for the service to fully start',),
(u'ServiceInstall',u'Name',u'N',None, None, None, None, u'Formatted',None, u'Internal Name of the Service',),
(u'ServiceInstall',u'Description',u'Y',None, None, None, None, u'Text',None, u'Description of service.',),
(u'ServiceInstall',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Required foreign key into the Component Table that controls the startup of the service',),
(u'ServiceInstall',u'Arguments',u'Y',None, None, None, None, u'Formatted',None, u'Arguments to include in every start of the service, passed to WinMain',),
(u'ServiceInstall',u'ServiceInstall',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'ServiceInstall',u'Dependencies',u'Y',None, None, None, None, u'Formatted',None, u'Other services this depends on to start. Separate by [~], and end with [~][~]',),
(u'ServiceInstall',u'DisplayName',u'Y',None, None, None, None, u'Formatted',None, u'External Name of the Service',),
(u'ServiceInstall',u'ErrorControl',u'N',-2147483647,2147483647,None, None, None, None, u'Severity of error if service fails to start',),
(u'ServiceInstall',u'LoadOrderGroup',u'Y',None, None, None, None, u'Formatted',None, u'LoadOrderGroup',),
(u'ServiceInstall',u'Password',u'Y',None, None, None, None, u'Formatted',None, u'password to run service with. (with StartName)',),
(u'ServiceInstall',u'ServiceType',u'N',-2147483647,2147483647,None, None, None, None, u'Type of the service',),
(u'ServiceInstall',u'StartName',u'Y',None, None, None, None, u'Formatted',None, u'User or object name to run service as',),
(u'ServiceInstall',u'StartType',u'N',0,4,None, None, None, None, u'Type of the service',),
(u'Shortcut',u'Name',u'N',None, None, None, None, u'Filename',None, u'The name of the shortcut to be created.',),
(u'Shortcut',u'Description',u'Y',None, None, None, None, u'Text',None, u'The description for the shortcut.',),
(u'Shortcut',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table denoting the component whose selection gates the shortcut creation/deletion.',),
(u'Shortcut',u'Icon_',u'Y',None, None, u'Icon',1,u'Identifier',None, u'Foreign key into the File table denoting the external icon file for the shortcut.',),
(u'Shortcut',u'IconIndex',u'Y',-32767,32767,None, None, None, None, u'The icon index for the shortcut.',),
(u'Shortcut',u'Directory_',u'N',None, None, u'Directory',1,u'Identifier',None, u'Foreign key into the Directory table denoting the directory where the shortcut file is created.',),
(u'Shortcut',u'Target',u'N',None, None, None, None, u'Shortcut',None, u'The shortcut target. This is usually a property that is expanded to a file or a folder that the shortcut points to.',),
(u'Shortcut',u'Arguments',u'Y',None, None, None, None, u'Formatted',None, u'The command-line arguments for the shortcut.',),
(u'Shortcut',u'Shortcut',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'Shortcut',u'Hotkey',u'Y',0,32767,None, None, None, None, u'The hotkey for the shortcut. It has the virtual-key code for the key in the low-order byte, and the modifier flags in the high-order byte. ',),
(u'Shortcut',u'ShowCmd',u'Y',None, None, None, None, None, u'1;3;7',u'The show command for the application window. The following values may be used.',),
(u'Shortcut',u'WkDir',u'Y',None, None, None, None, u'Identifier',None, u'Name of property defining location of working directory.',),
(u'Signature',u'FileName',u'N',None, None, None, None, u'Filename',None, u'The name of the file. This may contain a "short name|long name" pair.',),
(u'Signature',u'Signature',u'N',None, None, None, None, u'Identifier',None, u'The table key. The Signature represents a unique file signature.',),
(u'Signature',u'Languages',u'Y',None, None, None, None, u'Language',None, u'The languages supported by the file.',),
(u'Signature',u'MaxDate',u'Y',0,2147483647,None, None, None, None, u'The maximum creation date of the file.',),
(u'Signature',u'MaxSize',u'Y',0,2147483647,None, None, None, None, u'The maximum size of the file. ',),
(u'Signature',u'MaxVersion',u'Y',None, None, None, None, u'Text',None, u'The maximum version of the file.',),
(u'Signature',u'MinDate',u'Y',0,2147483647,None, None, None, None, u'The minimum creation date of the file.',),
(u'Signature',u'MinSize',u'Y',0,2147483647,None, None, None, None, u'The minimum size of the file.',),
(u'Signature',u'MinVersion',u'Y',None, None, None, None, u'Text',None, u'The minimum version of the file.',),
(u'TextStyle',u'TextStyle',u'N',None, None, None, None, u'Identifier',None, u'Name of the style. The primary key of this table. This name is embedded in the texts to indicate a style change.',),
(u'TextStyle',u'Color',u'Y',0,16777215,None, None, None, None, u'A long integer indicating the color of the string in the RGB format (Red, Green, Blue each 0-255, RGB = R + 256*G + 256^2*B).',),
(u'TextStyle',u'FaceName',u'N',None, None, None, None, u'Text',None, u'A string indicating the name of the font used. Required. The string must be at most 31 characters long.',),
(u'TextStyle',u'Size',u'N',0,32767,None, None, None, None, u'The size of the font used. This size is given in our units (1/12 of the system font height). Assuming that the system font is set to 12 point size, this is equivalent to the point size.',),
(u'TextStyle',u'StyleBits',u'Y',0,15,None, None, None, None, u'A combination of style bits.',),
(u'TypeLib',u'Description',u'Y',None, None, None, None, u'Text',None, None, ),
(u'TypeLib',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the type library to be operational.',),
(u'TypeLib',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
(u'TypeLib',u'Directory_',u'Y',None, None, u'Directory',1,u'Identifier',None, u'Optional. The foreign key into the Directory table denoting the path to the help file for the type library.',),
(u'TypeLib',u'Language',u'N',0,32767,None, None, None, None, u'The language of the library.',),
(u'TypeLib',u'Version',u'Y',0,16777215,None, None, None, None, u'The version of the library. The minor version is in the lower 8 bits of the integer. The major version is in the next 16 bits. ',),
(u'TypeLib',u'Cost',u'Y',0,2147483647,None, None, None, None, u'The cost associated with the registration of the typelib. This column is currently optional.',),
(u'TypeLib',u'LibID',u'N',None, None, None, None, u'Guid',None, u'The GUID that represents the library.',),
(u'UIText',u'Text',u'Y',None, None, None, None, u'Text',None, u'The localized version of the string.',),
(u'UIText',u'Key',u'N',None, None, None, None, u'Identifier',None, u'A unique key that identifies the particular string.',),
(u'Upgrade',u'Attributes',u'N',0,2147483647,None, None, None, None, u'The attributes of this product set.',),
(u'Upgrade',u'Language',u'Y',None, None, None, None, u'Language',None, u'A comma-separated list of languages for either products in this set or products not in this set.',),
(u'Upgrade',u'ActionProperty',u'N',None, None, None, None, u'UpperCase',None, u'The property to set when a product in this set is found.',),
(u'Upgrade',u'Remove',u'Y',None, None, None, None, u'Formatted',None, u'The list of features to remove when uninstalling a product from this set. The default is "ALL".',),
(u'Upgrade',u'UpgradeCode',u'N',None, None, None, None, u'Guid',None, u'The UpgradeCode GUID belonging to the products in this set.',),
(u'Upgrade',u'VersionMax',u'Y',None, None, None, None, u'Text',None, u'The maximum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
(u'Upgrade',u'VersionMin',u'Y',None, None, None, None, u'Text',None, u'The minimum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
(u'Verb',u'Sequence',u'Y',0,32767,None, None, None, None, u'Order within the verbs for a particular extension. Also used simply to specify the default verb.',),
(u'Verb',u'Argument',u'Y',None, None, None, None, u'Formatted',None, u'Optional value for the command arguments.',),
(u'Verb',u'Extension_',u'N',None, None, u'Extension',1,u'Text',None, u'The extension associated with the table row.',),
(u'Verb',u'Verb',u'N',None, None, None, None, u'Text',None, u'The verb for the command.',),
(u'Verb',u'Command',u'Y',None, None, None, None, u'Formatted',None, u'The command text.',),
]
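# Each record above is one row of the MSI _Validation table, in column order:
# (Table, Column, Nullable, MinValue, MaxValue, KeyTable, KeyColumn,
#  Category, Set, Description).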
| apache-2.0 |
dzan/xenOnArm | tools/python/xen/xm/cpupool-new.py | 41 | 1702 | #============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2009 Fujitsu Technology Solutions
#============================================================================
""" Create a new managed cpupool.
"""
import sys
from xen.xm.main import serverType, SERVER_XEN_API, server
from xen.xm.cpupool import parseCommandLine, err, help as help_options
from xen.util.sxputils import sxp2map
def help():
return help_options()
def main(argv):
try:
(opts, config) = parseCommandLine(argv)
except StandardError, ex:
err(str(ex))
if not opts:
return
if serverType == SERVER_XEN_API:
record = sxp2map(config)
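        # sxp2map may hand back a bare string instead of a list when the SXP
        # config names a single CPU; normalise it so cpu_pool.create always
        # receives a list for proposed_CPUs.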
if type(record.get('proposed_CPUs', [])) != list:
record['proposed_CPUs'] = [record['proposed_CPUs']]
server.xenapi.cpu_pool.create(record)
else:
server.xend.cpu_pool.new(config)
if __name__ == '__main__':
main(sys.argv)
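# Typical invocation (illustrative; the exact options accepted come from
# xen.xm.cpupool.parseCommandLine):
#   xm cpupool-new pool1.cfg name=Pool-A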
| gpl-2.0 |
chhe/streamlink | tests/test_utils_named_pipe.py | 3 | 5763 | import threading
import unittest
from unittest.mock import Mock, call, patch
from streamlink.compat import is_win32
from streamlink.utils.named_pipe import NamedPipe, NamedPipePosix, NamedPipeWindows
if is_win32:
from ctypes import windll, create_string_buffer, c_ulong, byref
GENERIC_READ = 0x80000000
OPEN_EXISTING = 3
class ReadNamedPipeThread(threading.Thread):
def __init__(self, pipe: NamedPipe):
super().__init__(daemon=True)
self.path = str(pipe.path)
self.error = None
self.data = b""
self.done = threading.Event()
def run(self):
try:
self.read()
except OSError as err: # pragma: no cover
self.error = err
self.done.set()
def read(self):
raise NotImplementedError
class ReadNamedPipeThreadPosix(ReadNamedPipeThread):
def read(self):
with open(self.path, "rb") as file:
while True:
data = file.read(-1)
if len(data) == 0:
break
self.data += data
class ReadNamedPipeThreadWindows(ReadNamedPipeThread):
def read(self):
handle = windll.kernel32.CreateFileW(self.path, GENERIC_READ, 0, None, OPEN_EXISTING, 0, None)
try:
while True:
data = create_string_buffer(NamedPipeWindows.bufsize)
read = c_ulong(0)
if not windll.kernel32.ReadFile(handle, data, NamedPipeWindows.bufsize, byref(read), None): # pragma: no cover
raise OSError(f"Failed reading pipe: {windll.kernel32.GetLastError()}")
self.data += data.value
if read.value != len(data.value):
break
finally:
windll.kernel32.CloseHandle(handle)
class TestNamedPipe(unittest.TestCase):
@patch("streamlink.utils.named_pipe._id", 0)
@patch("streamlink.utils.named_pipe.os.getpid", Mock(return_value=12345))
@patch("streamlink.utils.named_pipe.random.randint", Mock(return_value=67890))
@patch("streamlink.utils.named_pipe.NamedPipe._create", Mock(return_value=None))
@patch("streamlink.utils.named_pipe.log")
def test_name(self, mock_log):
NamedPipe()
NamedPipe()
self.assertEqual(mock_log.info.mock_calls, [
call("Creating pipe streamlinkpipe-12345-1-67890"),
call("Creating pipe streamlinkpipe-12345-2-67890")
])
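# The patched values above pin down the naming scheme: each pipe is named
# "streamlinkpipe-<pid>-<counter>-<random>", where <counter> is the
# module-level _id incremented once per NamedPipe instance.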
@unittest.skipIf(is_win32, "test only applicable on a POSIX OS")
class TestNamedPipePosix(unittest.TestCase):
def test_export(self):
self.assertEqual(NamedPipe, NamedPipePosix)
@patch("streamlink.utils.named_pipe.os.mkfifo")
def test_create(self, mock_mkfifo):
mock_mkfifo.side_effect = OSError()
with self.assertRaises(OSError):
NamedPipePosix()
self.assertEqual(mock_mkfifo.call_args[0][1:], (0o660,))
def test_close_before_open(self):
pipe = NamedPipePosix()
self.assertTrue(pipe.path.is_fifo())
pipe.close()
self.assertFalse(pipe.path.is_fifo())
# closing twice doesn't raise
pipe.close()
def test_write_before_open(self):
pipe = NamedPipePosix()
self.assertTrue(pipe.path.is_fifo())
with self.assertRaises(Exception):
pipe.write(b"foo")
pipe.close()
def test_named_pipe(self):
pipe = NamedPipePosix()
self.assertTrue(pipe.path.is_fifo())
reader = ReadNamedPipeThreadPosix(pipe)
reader.start()
pipe.open()
self.assertEqual(pipe.write(b"foo"), 3)
self.assertEqual(pipe.write(b"bar"), 3)
pipe.close()
self.assertFalse(pipe.path.is_fifo())
reader.done.wait(4000)
self.assertEqual(reader.error, None)
self.assertEqual(reader.data, b"foobar")
self.assertFalse(reader.is_alive())
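# Producer-side pattern exercised by the test above (a minimal sketch):
#   pipe = NamedPipePosix()   # mkfifo on a generated path
#   pipe.open()               # blocks until a reader opens the FIFO
#   pipe.write(b"payload")    # returns the number of bytes written
#   pipe.close()              # removes the FIFO from disk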
@unittest.skipIf(not is_win32, "test only applicable on Windows")
class TestNamedPipeWindows(unittest.TestCase):
def test_export(self):
self.assertEqual(NamedPipe, NamedPipeWindows)
@patch("streamlink.utils.named_pipe.windll.kernel32")
def test_create(self, mock_kernel32):
mock_kernel32.CreateNamedPipeW.return_value = NamedPipeWindows.INVALID_HANDLE_VALUE
mock_kernel32.GetLastError.return_value = 12345
with self.assertRaises(OSError) as cm:
NamedPipeWindows()
self.assertEqual(str(cm.exception), "Named pipe error code 0x00003039")
self.assertEqual(mock_kernel32.CreateNamedPipeW.call_args[0][1:], (
0x00000002,
0x00000000,
255,
8192,
8192,
0,
None
))
def test_close_before_open(self):
pipe = NamedPipeWindows()
handle = windll.kernel32.CreateFileW(str(pipe.path), GENERIC_READ, 0, None, OPEN_EXISTING, 0, None)
self.assertNotEqual(handle, NamedPipeWindows.INVALID_HANDLE_VALUE)
windll.kernel32.CloseHandle(handle)
pipe.close()
handle = windll.kernel32.CreateFileW(str(pipe.path), GENERIC_READ, 0, None, OPEN_EXISTING, 0, None)
self.assertEqual(handle, NamedPipeWindows.INVALID_HANDLE_VALUE)
# closing twice doesn't raise
pipe.close()
def test_named_pipe(self):
pipe = NamedPipeWindows()
reader = ReadNamedPipeThreadWindows(pipe)
reader.start()
pipe.open()
self.assertEqual(pipe.write(b"foo"), 3)
self.assertEqual(pipe.write(b"bar"), 3)
self.assertEqual(pipe.write(b"\0"), 1)
reader.done.wait(4000)
self.assertEqual(reader.error, None)
self.assertEqual(reader.data, b"foobar")
self.assertFalse(reader.is_alive())
pipe.close()
| bsd-2-clause |
malcolmcrossley/sm | drivers/LVHDoISCSISR.py | 1 | 27645 | #!/usr/bin/python
#
# Copyright (C) Citrix Systems Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# LVHDoISCSISR: LVHD over ISCSI software initiator SR driver
#
import SR, VDI, LVHDSR, ISCSISR, SRCommand, util, scsiutil, lvutil
import statvfs, time
import os, socket, sys, re
import xs_errors
import xmlrpclib
import mpath_cli, iscsilib
import glob, copy
import mpp_luncheck
import scsiutil
import xml.dom.minidom
CAPABILITIES = ["SR_PROBE", "SR_UPDATE", "SR_METADATA", "SR_TRIM",
"VDI_CREATE", "VDI_DELETE", "VDI_ATTACH", "VDI_DETACH",
"VDI_GENERATE_CONFIG", "VDI_CLONE", "VDI_SNAPSHOT",
"VDI_RESIZE", "ATOMIC_PAUSE", "VDI_RESET_ON_BOOT/2",
"VDI_UPDATE"]
CONFIGURATION = [ [ 'SCSIid', 'The scsi_id of the destination LUN' ], \
[ 'target', 'IP address or hostname of the iSCSI target' ], \
[ 'targetIQN', 'The IQN of the target LUN group to be attached' ], \
[ 'chapuser', 'The username to be used during CHAP authentication' ], \
[ 'chappassword', 'The password to be used during CHAP authentication' ], \
[ 'incoming_chapuser', 'The incoming username to be used during bi-directional CHAP authentication (optional)' ], \
[ 'incoming_chappassword', 'The incoming password to be used during bi-directional CHAP authentication (optional)' ], \
[ 'port', 'The network port number on which to query the target' ], \
[ 'multihomed', 'Enable multi-homing to this target, true or false (optional, defaults to same value as host.other_config:multipathing)' ], \
[ 'usediscoverynumber', 'The specific iscsi record index to use. (optional)' ], \
[ 'allocation', 'Valid values are thick or thin (optional, defaults to thick)'] ]
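# Example device-config as passed to sr-create (all values illustrative):
#   device-config:target=192.0.2.10 device-config:port=3260 \
#   device-config:targetIQN=iqn.2009-01.example.com:tgt \
#   device-config:SCSIid=360a98000c24e4f50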
DRIVER_INFO = {
'name': 'LVHD over iSCSI',
'description': 'SR plugin which represents disks as Logical Volumes within a Volume Group created on an iSCSI LUN',
'vendor': 'Citrix Systems Inc',
'copyright': '(C) 2008 Citrix Systems Inc',
'driver_version': '1.0',
'required_api_version': '1.0',
'capabilities': CAPABILITIES,
'configuration': CONFIGURATION
}
class LVHDoISCSISR(LVHDSR.LVHDSR):
"""LVHD over ISCSI storage repository"""
def handles(type):
if __name__ == '__main__':
name = sys.argv[0]
else:
name = __name__
if name.endswith("LVMoISCSISR"):
return type == "lvmoiscsi"
if type == "lvhdoiscsi":
return True
return False
handles = staticmethod(handles)
def load(self, sr_uuid):
if not sr_uuid:
# This is a probe call, generate a temp sr_uuid
sr_uuid = util.gen_uuid()
driver = SR.driver('iscsi')
if self.original_srcmd.dconf.has_key('target'):
self.original_srcmd.dconf['targetlist'] = self.original_srcmd.dconf['target']
iscsi = driver(self.original_srcmd, sr_uuid)
self.iscsiSRs = []
self.iscsiSRs.append(iscsi)
if self.dconf['target'].find(',') == 0 or self.dconf['targetIQN'] == "*":
# Instantiate multiple sessions
self.iscsiSRs = []
if self.dconf['targetIQN'] == "*":
IQN = "any"
else:
IQN = self.dconf['targetIQN']
dict = {}
IQNstring = ""
IQNs = []
try:
if self.dconf.has_key('multiSession'):
IQNs = self.dconf['multiSession'].split("|")
for IQN in IQNs:
if IQN:
dict[IQN] = ""
else:
try:
IQNs.remove(IQN)
except:
# Exceptions are not expected but just in case
pass
# Order in multiSession must be preserved. It is important for dual-controllers.
# IQNstring cannot be built with a dictionary iteration because of this
IQNstring = self.dconf['multiSession']
else:
for tgt in self.dconf['target'].split(','):
try:
tgt_ip = util._convertDNS(tgt)
except:
raise xs_errors.XenError('DNSError')
iscsilib.ensure_daemon_running_ok(iscsi.localIQN)
map = iscsilib.discovery(tgt_ip,iscsi.port,iscsi.chapuser,iscsi.chappassword,targetIQN=IQN)
util.SMlog("Discovery for IP %s returned %s" % (tgt,map))
for i in range(0,len(map)):
(portal,tpgt,iqn) = map[i]
(ipaddr, port) = iscsilib.parse_IP_port(portal)
try:
util._testHost(ipaddr, long(port), 'ISCSITarget')
except:
util.SMlog("Target Not reachable: (%s:%s)" % (ipaddr, port))
continue
key = "%s,%s,%s" % (ipaddr,port,iqn)
dict[key] = ""
# Again, do not mess up with IQNs order. Dual controllers will benefit from that
if IQNstring == "":
# Compose the IQNstring first
for key in dict.iterkeys(): IQNstring += "%s|" % key
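                    # e.g. "10.1.1.1,3260,iqn.example:tgt|10.1.1.2,3260,iqn.example:tgt|"
                    # (addresses illustrative): one "ip,port,iqn" triple per
                    # session, "|"-separated, with a deliberate trailing "|".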
# Reinitialize and store iterator
key_iterator = dict.iterkeys()
else:
key_iterator = IQNs
# Now load the individual iSCSI base classes
for key in key_iterator:
(ipaddr,port,iqn) = key.split(',')
srcmd_copy = copy.deepcopy(self.original_srcmd)
srcmd_copy.dconf['target'] = ipaddr
srcmd_copy.dconf['targetIQN'] = iqn
srcmd_copy.dconf['multiSession'] = IQNstring
util.SMlog("Setting targetlist: %s" % srcmd_copy.dconf['targetlist'])
self.iscsiSRs.append(driver(srcmd_copy, sr_uuid))
pbd = util.find_my_pbd(self.session, self.host_ref, self.sr_ref)
                if pbd != None and not self.dconf.has_key('multiSession'):
dconf = self.session.xenapi.PBD.get_device_config(pbd)
dconf['multiSession'] = IQNstring
self.session.xenapi.PBD.set_device_config(pbd, dconf)
except:
util.logException("LVHDoISCSISR.load")
self.iscsi = self.iscsiSRs[0]
# Be extremely careful not to throw exceptions here since this function
# is the main one used by all operations including probing and creating
pbd = None
try:
pbd = util.find_my_pbd(self.session, self.host_ref, self.sr_ref)
except:
pass
# Apart from the upgrade case, user must specify a SCSIid
if not self.dconf.has_key('SCSIid'):
# Dual controller issue
self.LUNs = {} # Dict for LUNs from all the iscsi objects
for ii in range(0, len(self.iscsiSRs)):
self.iscsi = self.iscsiSRs[ii]
self._LUNprint(sr_uuid)
for key in self.iscsi.LUNs:
self.LUNs[key] = self.iscsi.LUNs[key]
self.print_LUNs_XML()
self.iscsi = self.iscsiSRs[0] # back to original value
raise xs_errors.XenError('ConfigSCSIid')
self.SCSIid = self.dconf['SCSIid']
# This block checks if the first iscsi target contains the right SCSIid.
# If not it scans the other iscsi targets because chances are that more
# than one controller is present
dev_match = False
forced_login = False
# No need to check if only one iscsi target is present
if len(self.iscsiSRs) == 1:
pass
else:
target_success = False
attempt_discovery = False
for iii in range(0, len(self.iscsiSRs)):
# Check we didn't leave any iscsi session open
# If exceptions happened before, the cleanup function has worked on the right target.
if forced_login == True:
try:
iscsilib.ensure_daemon_running_ok(self.iscsi.localIQN)
iscsilib.logout(self.iscsi.target, self.iscsi.targetIQN)
forced_login = False
except:
raise xs_errors.XenError('ISCSILogout')
self.iscsi = self.iscsiSRs[iii]
util.SMlog("path %s" %self.iscsi.path)
util.SMlog("iscsci data: targetIQN %s, portal %s" % (self.iscsi.targetIQN, self.iscsi.target))
iscsilib.ensure_daemon_running_ok(self.iscsi.localIQN)
if not iscsilib._checkTGT(self.iscsi.targetIQN):
attempt_discovery = True
try:
# Ensure iscsi db has been populated
map = iscsilib.discovery(
self.iscsi.target,
self.iscsi.port,
self.iscsi.chapuser,
self.iscsi.chappassword,
targetIQN=self.iscsi.targetIQN)
if len(map) == 0:
util.SMlog("Discovery for iscsi data targetIQN %s,"
" portal %s returned empty list"
" Trying another path if available" %
(self.iscsi.targetIQN,
self.iscsi.target))
continue
except:
util.SMlog("Discovery failed for iscsi data targetIQN"
" %s, portal %s. Trying another path if"
" available" %
(self.iscsi.targetIQN, self.iscsi.target))
continue
try:
iscsilib.login(self.iscsi.target,
self.iscsi.targetIQN,
self.iscsi.chapuser,
self.iscsi.chappassword,
self.iscsi.incoming_chapuser,
self.iscsi.incoming_chappassword,
self.mpath == "true")
except:
util.SMlog("Login failed for iscsi data targetIQN %s,"
" portal %s. Trying another path"
" if available" %
(self.iscsi.targetIQN, self.iscsi.target))
continue
                target_success = True
forced_login = True
# A session should be active.
if not util.wait_for_path(self.iscsi.path, ISCSISR.MAX_TIMEOUT):
util.SMlog("%s has no associated LUNs" % self.iscsi.targetIQN)
continue
scsiid_path = "/dev/disk/by-id/scsi-" + self.SCSIid
if not util.wait_for_path(scsiid_path, ISCSISR.MAX_TIMEOUT):
util.SMlog("%s not found" %scsiid_path)
continue
for file in filter(self.iscsi.match_lun, util.listdir(self.iscsi.path)):
lun_path = os.path.join(self.iscsi.path,file)
lun_dev = scsiutil.getdev(lun_path)
try:
lun_scsiid = scsiutil.getSCSIid(lun_dev)
except:
util.SMlog("getSCSIid failed on %s in iscsi %s: LUN"
" offline or iscsi path down" %
(lun_dev, self.iscsi.path))
continue
util.SMlog("dev from lun %s %s" %(lun_dev, lun_scsiid))
if lun_scsiid == self.SCSIid:
util.SMlog("lun match in %s" %self.iscsi.path)
dev_match = True
# No more need to raise ISCSITarget exception.
# Resetting attempt_discovery
attempt_discovery = False
break
if dev_match:
if iii == 0:
break
util.SMlog("IQN reordering needed")
new_iscsiSRs = []
IQNs = {}
IQNstring = ""
# iscsiSRs can be seen as a circular buffer: the head now is the matching one
for kkk in range(iii, len(self.iscsiSRs)) + range(0, iii):
new_iscsiSRs.append(self.iscsiSRs[kkk])
ipaddr = self.iscsiSRs[kkk].target
port = self.iscsiSRs[kkk].port
iqn = self.iscsiSRs[kkk].targetIQN
key = "%s,%s,%s" % (ipaddr,port,iqn)
# The final string must preserve the order without repetition
if not IQNs.has_key(key):
IQNs[key] = ""
IQNstring += "%s|" % key
util.SMlog("IQNstring is now %s" %IQNstring)
self.iscsiSRs = new_iscsiSRs
util.SMlog("iqn %s is leading now" %self.iscsiSRs[0].targetIQN)
# Updating pbd entry, if any
try:
pbd = util.find_my_pbd(self.session, self.host_ref, self.sr_ref)
                        if pbd != None and self.dconf.has_key('multiSession'):
util.SMlog("Updating multiSession in PBD")
dconf = self.session.xenapi.PBD.get_device_config(pbd)
dconf['multiSession'] = IQNstring
self.session.xenapi.PBD.set_device_config(pbd, dconf)
except:
pass
break
if not target_success and attempt_discovery:
raise xs_errors.XenError('ISCSITarget')
# Check for any unneeded open iscsi sessions
if forced_login == True:
try:
iscsilib.ensure_daemon_running_ok(self.iscsi.localIQN)
iscsilib.logout(self.iscsi.target, self.iscsi.targetIQN)
forced_login = False
except:
raise xs_errors.XenError('ISCSILogout')
self._pathrefresh(LVHDoISCSISR)
LVHDSR.LVHDSR.load(self, sr_uuid)
def print_LUNs_XML(self):
dom = xml.dom.minidom.Document()
element = dom.createElement("iscsi-target")
dom.appendChild(element)
for uuid in self.LUNs:
val = self.LUNs[uuid]
entry = dom.createElement('LUN')
element.appendChild(entry)
for attr in ('vendor', 'serial', 'LUNid', \
'size', 'SCSIid'):
try:
aval = getattr(val, attr)
except AttributeError:
continue
if aval:
subentry = dom.createElement(attr)
entry.appendChild(subentry)
textnode = dom.createTextNode(str(aval))
subentry.appendChild(textnode)
print >>sys.stderr,dom.toprettyxml()
def _getSCSIid_from_LUN(self, sr_uuid):
was_attached = True
self.iscsi.attach(sr_uuid)
dev = self.dconf['LUNid'].split(',')
if len(dev) > 1:
raise xs_errors.XenError('LVMOneLUN')
path = os.path.join(self.iscsi.path,"LUN%s" % dev[0])
if not util.wait_for_path(path, ISCSISR.MAX_TIMEOUT):
util.SMlog("Unable to detect LUN attached to host [%s]" % path)
try:
SCSIid = scsiutil.getSCSIid(path)
except:
raise xs_errors.XenError('InvalidDev')
self.iscsi.detach(sr_uuid)
return SCSIid
def _LUNprint(self, sr_uuid):
if self.iscsi.attached:
# Force a rescan on the bus.
self.iscsi.refresh()
# time.sleep(5)
# Now call attach (handles the refcounting + session activa)
self.iscsi.attach(sr_uuid)
util.SMlog("LUNprint: waiting for path: %s" % self.iscsi.path)
if util.wait_for_path("%s/LUN*" % self.iscsi.path, ISCSISR.MAX_TIMEOUT):
try:
adapter=self.iscsi.adapter[self.iscsi.address]
util.SMlog("adapter=%s" % adapter)
# find a scsi device on which to issue a report luns command:
devs=glob.glob("%s/LUN*" % self.iscsi.path)
sgdevs = []
for i in devs:
sgdevs.append(int(i.split("LUN")[1]))
sgdevs.sort()
sgdev = "%s/LUN%d" % (self.iscsi.path,sgdevs[0])
# issue a report luns:
luns=util.pread2(["/usr/bin/sg_luns","-q",sgdev]).split('\n')
nluns=len(luns)-1 # remove the line relating to the final \n
# check if the LUNs are MPP-RDAC Luns
scsi_id = scsiutil.getSCSIid(sgdev)
mpp_lun = False
if (mpp_luncheck.is_RdacLun(scsi_id)):
mpp_lun = True
link=glob.glob('/dev/disk/by-scsibus/%s-*' % scsi_id)
mpp_adapter = link[0].split('/')[-1].split('-')[-1].split(':')[0]
# make sure we've got that many sg devices present
for i in range(0,30):
luns=scsiutil._dosgscan()
sgdevs=filter(lambda r: r[1]==adapter, luns)
if mpp_lun:
sgdevs.extend(filter(lambda r: r[1]==mpp_adapter, luns))
if len(sgdevs)>=nluns:
util.SMlog("Got all %d sg devices" % nluns)
break
else:
util.SMlog("Got %d sg devices - expecting %d" % (len(sgdevs),nluns))
time.sleep(1)
if os.path.exists("/sbin/udevsettle"):
util.pread2(["/sbin/udevsettle"])
else:
util.pread2(["/sbin/udevadm","settle"])
except:
util.SMlog("Generic exception caught. Pass")
pass # Make sure we don't break the probe...
self.iscsi.print_LUNs()
self.iscsi.detach(sr_uuid)
def create(self, sr_uuid, size):
# Check SCSIid not already in use by other PBDs
if util.test_SCSIid(self.session, sr_uuid, self.SCSIid):
raise xs_errors.XenError('SRInUse')
self.iscsi.attach(sr_uuid)
try:
if not self.iscsi._attach_LUN_bySCSIid(self.SCSIid):
# UPGRADE FROM GEORGE: take care of ill-formed SCSIid
upgraded = False
matchSCSIid = False
for file in filter(self.iscsi.match_lun, util.listdir(self.iscsi.path)):
path = os.path.join(self.iscsi.path,file)
if not util.wait_for_path(path, ISCSISR.MAX_TIMEOUT):
util.SMlog("Unable to detect LUN attached to host [%s]" % path)
continue
try:
SCSIid = scsiutil.getSCSIid(path)
except:
continue
try:
matchSCSIid = scsiutil.compareSCSIid_2_6_18(self.SCSIid, path)
except:
continue
if (matchSCSIid):
util.SMlog("Performing upgrade from George")
try:
pbd = util.find_my_pbd(self.session, self.host_ref, self.sr_ref)
device_config = self.session.xenapi.PBD.get_device_config(pbd)
device_config['SCSIid'] = SCSIid
self.session.xenapi.PBD.set_device_config(pbd, device_config)
self.dconf['SCSIid'] = SCSIid
self.SCSIid = self.dconf['SCSIid']
except:
continue
if not self.iscsi._attach_LUN_bySCSIid(self.SCSIid):
raise xs_errors.XenError('InvalidDev')
else:
upgraded = True
break
else:
util.SMlog("Not a matching LUN, skip ... scsi_id is: %s" % SCSIid)
continue
if not upgraded:
raise xs_errors.XenError('InvalidDev')
self._pathrefresh(LVHDoISCSISR)
LVHDSR.LVHDSR.create(self, sr_uuid, size)
except Exception, inst:
self.iscsi.detach(sr_uuid)
raise xs_errors.XenError("SRUnavailable", opterr=inst)
self.iscsi.detach(sr_uuid)
def delete(self, sr_uuid):
LVHDSR.LVHDSR.delete(self, sr_uuid)
for i in self.iscsiSRs:
i.detach(sr_uuid)
def attach(self, sr_uuid):
if ('allocation' in self.sm_config and
self.sm_config['allocation'] == 'xlvhd'):
self._write_vginfo(sr_uuid)
try:
connected = False
for i in self.iscsiSRs:
try:
i.attach(sr_uuid)
except SR.SROSError, inst:
# Some iscsi objects can fail login but not all. Storing exception
if inst.errno == 141:
util.SMlog("Connection failed for target %s, continuing.." %i.target)
stored_exception = inst
continue
else:
raise
else:
connected = True
# Check if at least on iscsi succeeded
if not connected:
raise stored_exception
if not i._attach_LUN_bySCSIid(self.SCSIid):
raise xs_errors.XenError('InvalidDev')
if self.dconf.has_key('multiSession'):
# Force a manual bus refresh
for a in self.iscsi.adapter:
scsiutil.rescan([self.iscsi.adapter[a]])
self._start_xenvmd(sr_uuid)
self._pathrefresh(LVHDoISCSISR)
LVHDSR.LVHDSR.attach(self, sr_uuid)
self._symlink_xenvm_conf()
except Exception, inst:
for i in self.iscsiSRs:
i.detach(sr_uuid)
raise xs_errors.XenError("SRUnavailable", opterr=inst)
self._setMultipathableFlag(SCSIid=self.SCSIid)
self._start_local_allocator(sr_uuid)
def detach(self, sr_uuid):
LVHDSR.LVHDSR.detach(self, sr_uuid)
for i in self.iscsiSRs:
i.detach(sr_uuid)
def probe(self):
self.uuid = util.gen_uuid()
# When multipathing is enabled, since we don't refcount the multipath maps,
# we should not attempt to do the iscsi.attach/detach when the map is already present,
# as this will remove it (which may well be in use).
if self.mpath == 'true' and self.dconf.has_key('SCSIid'):
maps = []
mpp_lun = False
try:
if (mpp_luncheck.is_RdacLun(self.dconf['SCSIid'])):
mpp_lun = True
link=glob.glob('/dev/disk/mpInuse/%s-*' % self.dconf['SCSIid'])
else:
maps = mpath_cli.list_maps()
except:
pass
if (mpp_lun):
if (len(link)):
raise xs_errors.XenError('SRInUse')
else:
if self.dconf['SCSIid'] in maps:
raise xs_errors.XenError('SRInUse')
self.iscsi.attach(self.uuid)
if not self.iscsi._attach_LUN_bySCSIid(self.SCSIid):
util.SMlog("Unable to detect LUN")
raise xs_errors.XenError('InvalidDev')
self._pathrefresh(LVHDoISCSISR)
out = LVHDSR.LVHDSR.probe(self)
self.iscsi.detach(self.uuid)
return out
def vdi(self, uuid):
return LVHDoISCSIVDI(self, uuid)
class LVHDoISCSIVDI(LVHDSR.LVHDVDI):
def generate_config(self, sr_uuid, vdi_uuid):
util.SMlog("LVHDoISCSIVDI.generate_config")
if not lvutil._checkLV(self.path):
raise xs_errors.XenError('VDIUnavailable')
if self.sr.sm_config['allocation'] == "xlvhd":
lvutil.flushLV(self.path)
dict = {}
self.sr.dconf['localIQN'] = self.sr.iscsi.localIQN
self.sr.dconf['multipathing'] = self.sr.mpath
self.sr.dconf['multipathhandle'] = self.sr.mpathhandle
dict['device_config'] = self.sr.dconf
if dict['device_config'].has_key('chappassword_secret'):
s = util.get_secret(self.session, dict['device_config']['chappassword_secret'])
del dict['device_config']['chappassword_secret']
dict['device_config']['chappassword'] = s
dict['sr_uuid'] = sr_uuid
dict['vdi_uuid'] = vdi_uuid
dict['allocation'] = self.sr.sm_config['allocation']
dict['command'] = 'vdi_attach_from_config'
# Return the 'config' encoded within a normal XMLRPC response so that
# we can use the regular response/error parsing code.
config = xmlrpclib.dumps(tuple([dict]), "vdi_attach_from_config")
return xmlrpclib.dumps((config,), "", True)
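    # Consumer-side decoding (a sketch): the outer dumps() wraps the inner
    # payload as a one-element XML-RPC methodresponse, so the dict built above
    # is recovered with two loads() calls:
    #   (inner,), _ = xmlrpclib.loads(returned_string)
    #   (params, method) = xmlrpclib.loads(inner)  # method == "vdi_attach_from_config"
    #   config_dict = params[0]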
def attach_from_config(self, sr_uuid, vdi_uuid):
util.SMlog("LVHDoISCSIVDI.attach_from_config")
try:
self.sr.iscsi.attach(sr_uuid)
if not self.sr.iscsi._attach_LUN_bySCSIid(self.sr.SCSIid):
raise xs_errors.XenError('InvalidDev')
return LVHDSR.LVHDVDI.attach(self, sr_uuid, vdi_uuid)
except:
util.logException("LVHDoISCSIVDI.attach_from_config")
raise xs_errors.XenError('SRUnavailable', \
opterr='Unable to attach the heartbeat disk')
if __name__ == '__main__':
SRCommand.run(LVHDoISCSISR, DRIVER_INFO)
else:
SR.registerSR(LVHDoISCSISR)
| lgpl-2.1 |
timohtey/mediadrop_copy | mediacore_env/Lib/re.py | 153 | 12959 | #
# Secret Labs' Regular Expression Engine
#
# re-compatible interface for the sre matching engine
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# This version of the SRE library can be redistributed under CNRI's
# Python 1.6 license. For any other use, please contact Secret Labs
# AB (info@pythonware.com).
#
# Portions of this engine have been developed in cooperation with
# CNRI. Hewlett-Packard provided funding for 1.6 integration and
# other compatibility work.
#
r"""Support for regular expressions (RE).
This module provides regular expression matching operations similar to
those found in Perl. It supports both 8-bit and Unicode strings; both
the pattern and the strings being processed can contain null bytes and
characters outside the US ASCII range.
Regular expressions can contain both special and ordinary characters.
Most ordinary characters, like "A", "a", or "0", are the simplest
regular expressions; they simply match themselves. You can
concatenate ordinary characters, so last matches the string 'last'.
The special characters are:
"." Matches any character except a newline.
"^" Matches the start of the string.
"$" Matches the end of the string or just before the newline at
the end of the string.
"*" Matches 0 or more (greedy) repetitions of the preceding RE.
Greedy means that it will match as many repetitions as possible.
"+" Matches 1 or more (greedy) repetitions of the preceding RE.
"?" Matches 0 or 1 (greedy) of the preceding RE.
*?,+?,?? Non-greedy versions of the previous three special characters.
{m,n} Matches from m to n repetitions of the preceding RE.
{m,n}? Non-greedy version of the above.
"\\" Either escapes special characters or signals a special sequence.
[] Indicates a set of characters.
A "^" as the first character indicates a complementing set.
"|" A|B, creates an RE that will match either A or B.
(...) Matches the RE inside the parentheses.
The contents can be retrieved or matched later in the string.
(?iLmsux) Set the I, L, M, S, U, or X flag for the RE (see below).
(?:...) Non-grouping version of regular parentheses.
(?P<name>...) The substring matched by the group is accessible by name.
(?P=name) Matches the text matched earlier by the group named name.
(?#...) A comment; ignored.
(?=...) Matches if ... matches next, but doesn't consume the string.
(?!...) Matches if ... doesn't match next.
(?<=...) Matches if preceded by ... (must be fixed length).
(?<!...) Matches if not preceded by ... (must be fixed length).
(?(id/name)yes|no) Matches yes pattern if the group with id/name matched,
the (optional) no pattern otherwise.
The special sequences consist of "\\" and a character from the list
below. If the ordinary character is not on the list, then the
resulting RE will match the second character.
\number Matches the contents of the group of the same number.
\A Matches only at the start of the string.
\Z Matches only at the end of the string.
\b Matches the empty string, but only at the start or end of a word.
\B Matches the empty string, but not at the start or end of a word.
\d Matches any decimal digit; equivalent to the set [0-9].
\D Matches any non-digit character; equivalent to the set [^0-9].
\s Matches any whitespace character; equivalent to [ \t\n\r\f\v].
\S Matches any non-whitespace character; equiv. to [^ \t\n\r\f\v].
\w Matches any alphanumeric character; equivalent to [a-zA-Z0-9_].
With LOCALE, it will match the set [0-9_] plus characters defined
as letters for the current locale.
\W Matches the complement of \w.
\\ Matches a literal backslash.
This module exports the following functions:
match Match a regular expression pattern to the beginning of a string.
search Search a string for the presence of a pattern.
sub Substitute occurrences of a pattern found in a string.
subn Same as sub, but also return the number of substitutions made.
split Split a string by the occurrences of a pattern.
findall Find all occurrences of a pattern in a string.
finditer Return an iterator yielding a match object for each match.
compile Compile a pattern into a RegexObject.
purge Clear the regular expression cache.
escape Backslash all non-alphanumerics in a string.
Some of the functions in this module take flags as optional parameters:
I IGNORECASE Perform case-insensitive matching.
L LOCALE Make \w, \W, \b, \B, dependent on the current locale.
M MULTILINE "^" matches the beginning of lines (after a newline)
as well as the string.
"$" matches the end of lines (before a newline) as well
as the end of the string.
S DOTALL "." matches any character at all, including the newline.
X VERBOSE Ignore whitespace and comments for nicer looking RE's.
U UNICODE Make \w, \W, \b, \B, dependent on the Unicode locale.
This module also defines an exception 'error'.
"""
import sys
import sre_compile
import sre_parse
# public symbols
__all__ = [ "match", "search", "sub", "subn", "split", "findall",
"compile", "purge", "template", "escape", "I", "L", "M", "S", "X",
"U", "IGNORECASE", "LOCALE", "MULTILINE", "DOTALL", "VERBOSE",
"UNICODE", "error" ]
__version__ = "2.2.1"
# flags
I = IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE # ignore case
L = LOCALE = sre_compile.SRE_FLAG_LOCALE # assume current 8-bit locale
U = UNICODE = sre_compile.SRE_FLAG_UNICODE # assume unicode locale
M = MULTILINE = sre_compile.SRE_FLAG_MULTILINE # make anchors look for newline
S = DOTALL = sre_compile.SRE_FLAG_DOTALL # make dot match newline
X = VERBOSE = sre_compile.SRE_FLAG_VERBOSE # ignore whitespace and comments
# sre extensions (experimental, don't rely on these)
T = TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE # disable backtracking
DEBUG = sre_compile.SRE_FLAG_DEBUG # dump pattern after compilation
# sre exception
error = sre_compile.error
# --------------------------------------------------------------------
# public interface
def match(pattern, string, flags=0):
"""Try to apply the pattern at the start of the string, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).match(string)
def search(pattern, string, flags=0):
"""Scan through string looking for a match to the pattern, returning
a match object, or None if no match was found."""
return _compile(pattern, flags).search(string)
def sub(pattern, repl, string, count=0, flags=0):
"""Return the string obtained by replacing the leftmost
non-overlapping occurrences of the pattern in string by the
replacement repl. repl can be either a string or a callable;
if a string, backslash escapes in it are processed. If it is
a callable, it's passed the match object and must return
a replacement string to be used."""
return _compile(pattern, flags).sub(repl, string, count)
def subn(pattern, repl, string, count=0, flags=0):
"""Return a 2-tuple containing (new_string, number).
new_string is the string obtained by replacing the leftmost
non-overlapping occurrences of the pattern in the source
string by the replacement repl. number is the number of
substitutions that were made. repl can be either a string or a
callable; if a string, backslash escapes in it are processed.
If it is a callable, it's passed the match object and must
return a replacement string to be used."""
return _compile(pattern, flags).subn(repl, string, count)
def split(pattern, string, maxsplit=0, flags=0):
"""Split the source string by the occurrences of the pattern,
returning a list containing the resulting substrings."""
return _compile(pattern, flags).split(string, maxsplit)
def findall(pattern, string, flags=0):
"""Return a list of all non-overlapping matches in the string.
If one or more groups are present in the pattern, return a
list of groups; this will be a list of tuples if the pattern
has more than one group.
Empty matches are included in the result."""
return _compile(pattern, flags).findall(string)
if sys.hexversion >= 0x02020000:
__all__.append("finditer")
def finditer(pattern, string, flags=0):
"""Return an iterator over all non-overlapping matches in the
string. For each match, the iterator returns a match object.
Empty matches are included in the result."""
return _compile(pattern, flags).finditer(string)
def compile(pattern, flags=0):
"Compile a regular expression pattern, returning a pattern object."
return _compile(pattern, flags)
def purge():
"Clear the regular expression cache"
_cache.clear()
_cache_repl.clear()
def template(pattern, flags=0):
"Compile a template pattern, returning a pattern object"
return _compile(pattern, flags|T)
_alphanum = frozenset(
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
def escape(pattern):
"Escape all non-alphanumeric characters in pattern."
s = list(pattern)
alphanum = _alphanum
for i, c in enumerate(pattern):
if c not in alphanum:
if c == "\000":
s[i] = "\\000"
else:
s[i] = "\\" + c
return pattern[:0].join(s)
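# Escaping sketch (illustrative input):
#     >>> escape("www.python.org")
#     'www\\.python\\.org'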
# --------------------------------------------------------------------
# internals
_cache = {}
_cache_repl = {}
_pattern_type = type(sre_compile.compile("", 0))
_MAXCACHE = 100
def _compile(*key):
# internal: compile pattern
cachekey = (type(key[0]),) + key
p = _cache.get(cachekey)
if p is not None:
return p
pattern, flags = key
if isinstance(pattern, _pattern_type):
if flags:
raise ValueError('Cannot process flags argument with a compiled pattern')
return pattern
if not sre_compile.isstring(pattern):
raise TypeError, "first argument must be string or compiled pattern"
try:
p = sre_compile.compile(pattern, flags)
except error, v:
raise error, v # invalid expression
if len(_cache) >= _MAXCACHE:
_cache.clear()
_cache[cachekey] = p
return p
def _compile_repl(*key):
# internal: compile replacement pattern
p = _cache_repl.get(key)
if p is not None:
return p
repl, pattern = key
try:
p = sre_parse.parse_template(repl, pattern)
except error, v:
raise error, v # invalid expression
if len(_cache_repl) >= _MAXCACHE:
_cache_repl.clear()
_cache_repl[key] = p
return p
def _expand(pattern, match, template):
# internal: match.expand implementation hook
template = sre_parse.parse_template(template, pattern)
return sre_parse.expand_template(template, match)
def _subx(pattern, template):
# internal: pattern.sub/subn implementation helper
template = _compile_repl(template, pattern)
if not template[0] and len(template[1]) == 1:
# literal replacement
return template[1][0]
def filter(match, template=template):
return sre_parse.expand_template(template, match)
return filter
# register myself for pickling
import copy_reg
def _pickle(p):
return _compile, (p.pattern, p.flags)
copy_reg.pickle(_pattern_type, _pickle, _compile)
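# Round-trip sketch (illustrative): pickling a compiled pattern re-creates it
# via _compile(pattern, flags):
#     >>> import pickle
#     >>> pickle.loads(pickle.dumps(compile(r"\d+"))).pattern
#     '\\d+'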
# --------------------------------------------------------------------
# experimental stuff (see python-dev discussions for details)
class Scanner:
def __init__(self, lexicon, flags=0):
from sre_constants import BRANCH, SUBPATTERN
self.lexicon = lexicon
# combine phrases into a compound pattern
p = []
s = sre_parse.Pattern()
s.flags = flags
for phrase, action in lexicon:
p.append(sre_parse.SubPattern(s, [
(SUBPATTERN, (len(p)+1, sre_parse.parse(phrase, flags))),
]))
s.groups = len(p)+1
p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
self.scanner = sre_compile.compile(p)
def scan(self, string):
result = []
append = result.append
match = self.scanner.scanner(string).match
i = 0
while 1:
m = match()
if not m:
break
j = m.end()
if i == j:
break
action = self.lexicon[m.lastindex-1][1]
if hasattr(action, '__call__'):
self.match = m
action = action(self, m.group())
if action is not None:
append(action)
i = j
return result, string[i:]
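# A small usage sketch for the experimental Scanner above; the lexicon and
# input are illustrative:
#
#     >>> s = Scanner([(r"\d+", lambda scanner, token: ("NUM", token)),
#     ...              (r"[a-z]+", lambda scanner, token: ("WORD", token)),
#     ...              (r"\s+", None)])  # None action: skip whitespace
#     >>> s.scan("hello 42 world")
#     ([('WORD', 'hello'), ('NUM', '42'), ('WORD', 'world')], '')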
| gpl-3.0 |
Germanika/plover | plover/oslayer/log_dbus.py | 2 | 1337 |
import logging
import dbus
from plover import log, __name__ as __software_name__
APPNAME = __software_name__.capitalize()
SERVICE = 'org.freedesktop.Notifications'    # bus name (doubles as interface name)
INTERFACE = '/org/freedesktop/Notifications' # object path, despite the name
class DbusNotificationHandler(logging.Handler):
""" Handler using DBus notifications to show messages. """
def __init__(self):
super(DbusNotificationHandler, self).__init__()
self._bus = dbus.SessionBus()
self._proxy = self._bus.get_object(SERVICE, INTERFACE)
self._notify = dbus.Interface(self._proxy, SERVICE)
self.setLevel(log.WARNING)
self.setFormatter(log.NoExceptionTracebackFormatter('<b>%(levelname)s:</b> %(message)s'))
def emit(self, record):
level = record.levelno
message = self.format(record)
if message.endswith('\n'):
message = message[:-1]
if level <= log.INFO:
timeout = 10
urgency = 0
elif level <= log.WARNING:
timeout = 15
urgency = 1
else:
timeout = 0
urgency = 2
self._notify.Notify(APPNAME, 0, '', # replaces_id, app_icon
APPNAME, message, '', # actions
{ 'urgency': dbus.Byte(urgency) },
timeout * 1000)
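# A minimal wiring sketch using the stdlib logging API (plover's own startup
# code may attach this handler differently; a running session bus is assumed):
#
#     handler = DbusNotificationHandler()
#     logging.getLogger().addHandler(handler)
#     logging.getLogger().warning("shown as a desktop notification")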
| gpl-2.0 |
vritant/subscription-manager | src/subscription_manager/base_plugin.py | 3 | 1448 | #
# Copyright (c) 2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
class SubManPlugin(object):
"""Base class for all subscription-manager "rhsm-plugins"
Plugins need to subclass SubManPlugin() to be found
"""
name = None
conf = None
    # If all_slots is set, the plugin is registered to every slot; it is
    # then up to the plugin to provide the matching callables.
all_slots = None
    # Did we find hooks matching the provided slots? In other words,
    # is this plugin actually going to be used?
found_slots_for_hooks = False
def __init__(self, conf=None):
if conf:
self.conf = conf
if self.conf is None:
raise TypeError("SubManPlugin can not be constructed with conf=None")
def __str__(self):
return self.name or self.__class__.__name__
@classmethod
def get_plugin_key(cls):
return ".".join([cls.__module__, cls.__name__])
| gpl-2.0 |
redhat-openstack/horizon | openstack_dashboard/dashboards/admin/defaults/tabs.py | 82 | 1417 | # Copyright 2013 Kylin, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
from openstack_dashboard.usage import quotas
from openstack_dashboard.dashboards.admin.defaults import tables
class DefaultQuotasTab(tabs.TableTab):
table_classes = (tables.QuotasTable,)
name = _("Default Quotas")
slug = "quotas"
template_name = ("horizon/common/_detail_table.html")
def get_quotas_data(self):
request = self.tab_group.request
try:
data = quotas.get_default_quota_data(request)
except Exception:
data = []
exceptions.handle(self.request, _('Unable to get quota info.'))
return data
class DefaultsTabs(tabs.TabGroup):
slug = "defaults"
tabs = (DefaultQuotasTab,)
sticky = True
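# A typical wiring sketch: a panel's index view points at this tab group
# (the view class and template path here are illustrative):
#
#     class IndexView(tabs.TabbedTableView):
#         tab_group_class = DefaultsTabs
#         template_name = 'admin/defaults/index.html'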
| apache-2.0 |