repo_name: string (6-61) | path: string (4-230) | copies: string (1-3) | size: string (4-6) | text: string (1.01k-850k) | license: 15 classes | hash: int64 (-9,220,477,234,079,998,000 to 9,219,060,020B) | line_mean: float64 (11.6-96.6) | line_max: int64 (32-939) | alpha_frac: float64 (0.26-0.9) | autogenerated: bool (1 class) | ratio: float64 (1.62-6.1) | config_test: bool (2 classes) | has_no_keywords: bool (2 classes) | few_assignments: bool (1 class)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Ultimaker/Uranium | UM/TaskManagement/HttpRequestManager.py | 1 | 24890 | # Copyright (c) 2020 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.
import json
import time
import uuid
from collections import deque
from threading import RLock
from typing import Callable, cast, Dict, Set, Union, Optional, Any
from PyQt5.QtCore import QObject, QUrl, Qt, pyqtSignal, pyqtProperty
from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkRequest, QNetworkReply
from UM.Logger import Logger
from UM.TaskManagement.HttpRequestData import HttpRequestData
from UM.TaskManagement.HttpRequestScope import HttpRequestScope
from UM.TaskManagement.TaskManager import TaskManager
#
# Summary:
#
# HttpRequestManager is a wrapper around Qt's QNetworkAccessManager that makes it more convenient to do the following things:
# (1) Keep track of the HTTP requests one has issued. This is done via the HttpRequestData object. Each HttpRequestData
# object represents an issued HTTP request.
# (2) A request can be aborted if it hasn't been issued to QNetworkAccessManager yet, or while it's still being
#     handled by QNetworkAccessManager.
# (3) Updates on each request are delivered via user-specified callback functions. So, for each request, you can give
# optional callbacks:
# - A successful callback, invoked when the request has been finished successfully.
# - An error callback, invoked when an error has occurred, including when a request was aborted by the user or
# timed out.
# - A download progress callback, invoked when there's an update on the download progress.
# - An upload progress callback, invoked when there's an update on the upload progress.
# (4) An optional timeout can be specified for an HTTP request. Note that this timeout is the max wait time between
#     each time the request gets a response from the server. This is handled via the download and upload progress
#     callbacks. A QTimer is used for each request to track its timeout if set. If the timer gets triggered and there
#     is indeed a timeout, the request will be aborted. A request that is aborted due to a timeout will have its
#     error callback invoked with the error code QNetworkReply::OperationCanceledError, and its HttpRequestData
#     will have the "is_aborted_due_to_timeout" property set to True.
#
# All requests are handled by QNetworkAccessManager. We consider that all the requests that are being handled by
# QNetworkAccessManager at a certain point are running concurrently.
#
#
# A dedicated manager that processes and schedules HTTP requests. It provides public APIs for issuing HTTP requests
# and the results, successful or not, will be communicated back via callback functions. For each request, the
# following callback functions can be optionally specified:
#
# - callback: This function will be invoked when a request finishes. (bound to QNetworkReply.finished signal)
# Its signature should be "def callback(QNetworkReply) -> None" or other compatible form.
#
# - error_callback: This function will be invoked when a request fails. (bound to QNetworkReply.error signal)
# Its signature should be "def callback(QNetworkReply, QNetworkReply.NetworkError) -> None" or other compatible
# form.
#
#  - download_progress_callback: This function will be invoked whenever the download progress changes. (bound to
# QNetworkReply.downloadProgress signal)
# Its signature should be "def callback(bytesReceived: int, bytesTotal: int) -> None" or other compatible form.
#
#  - upload_progress_callback: This function will be invoked whenever the upload progress changes. (bound to
#        QNetworkReply.uploadProgress signal)
# Its signature should be "def callback(bytesSent: int, bytesTotal: int) -> None" or other compatible form.
#
#  - timeout (EXPERIMENTAL): The timeout, in seconds, for a request. As described in the summary above, this is NOT
#        a limit on the total duration of the request, but the maximum wait time between consecutive progress updates
#        (download or upload) from the server. A QTimer tracks this for each request, and the request is aborted if
#        the timer fires. So a very slow but steadily progressing download will NOT be aborted, as long as progress
#        updates keep arriving within each timeout window.
#
class HttpRequestManager(TaskManager):
__instance = None # type: Optional[HttpRequestManager]
internetReachableChanged = pyqtSignal(bool)
@classmethod
def getInstance(cls, *args, **kwargs) -> "HttpRequestManager":
if cls.__instance is None:
cls.__instance = cls(*args, **kwargs)
return cls.__instance
def __init__(self, max_concurrent_requests: int = 4, parent: Optional["QObject"] = None,
enable_request_benchmarking: bool = False) -> None:
if HttpRequestManager.__instance is not None:
raise RuntimeError("Try to create singleton '%s' more than once" % self.__class__.__name__)
HttpRequestManager.__instance = self
super().__init__(parent)
self._network_manager = QNetworkAccessManager(self)
self._account_manager = None
self._is_internet_reachable = True
# All the requests that have been issued to the QNetworkManager are considered as running concurrently. This
# number defines the max number of requests that will be issued to the QNetworkManager.
self._max_concurrent_requests = max_concurrent_requests
# A FIFO queue for the pending requests.
self._request_queue = deque() # type: deque
# A set of all currently in progress requests
self._requests_in_progress = set() # type: Set[HttpRequestData]
self._request_lock = RLock()
self._process_requests_scheduled = False
# Debug options
#
    # Enabling benchmarking makes the manager time how long each request takes from start to finish and log the
    # durations.
self._enable_request_benchmarking = enable_request_benchmarking
@pyqtProperty(bool, notify = internetReachableChanged)
def isInternetReachable(self) -> bool:
return self._is_internet_reachable
# Public API for creating an HTTP GET request.
# Returns an HttpRequestData instance that represents this request.
def get(self, url: str,
headers_dict: Optional[Dict[str, str]] = None,
callback: Optional[Callable[["QNetworkReply"], None]] = None,
error_callback: Optional[Callable[["QNetworkReply", "QNetworkReply.NetworkError"], None]] = None,
download_progress_callback: Optional[Callable[[int, int], None]] = None,
upload_progress_callback: Optional[Callable[[int, int], None]] = None,
timeout: Optional[float] = None,
scope: Optional[HttpRequestScope] = None) -> "HttpRequestData":
return self._createRequest("get", url, headers_dict = headers_dict,
callback = callback, error_callback = error_callback,
download_progress_callback = download_progress_callback,
upload_progress_callback = upload_progress_callback,
timeout = timeout,
scope = scope)
# Public API for creating an HTTP PUT request.
# Returns an HttpRequestData instance that represents this request.
def put(self, url: str,
headers_dict: Optional[Dict[str, str]] = None,
data: Optional[Union[bytes, bytearray]] = None,
callback: Optional[Callable[["QNetworkReply"], None]] = None,
error_callback: Optional[Callable[["QNetworkReply", "QNetworkReply.NetworkError"], None]] = None,
download_progress_callback: Optional[Callable[[int, int], None]] = None,
upload_progress_callback: Optional[Callable[[int, int], None]] = None,
timeout: Optional[float] = None,
scope: Optional[HttpRequestScope] = None) -> "HttpRequestData":
return self._createRequest("put", url, headers_dict = headers_dict, data = data,
callback = callback, error_callback = error_callback,
download_progress_callback = download_progress_callback,
upload_progress_callback = upload_progress_callback,
timeout = timeout,
scope = scope)
    # Public API for creating an HTTP POST request.
# Returns an HttpRequestData instance that represents this request.
def post(self, url: str,
headers_dict: Optional[Dict[str, str]] = None,
data: Optional[Union[bytes, bytearray]] = None,
callback: Optional[Callable[["QNetworkReply"], None]] = None,
error_callback: Optional[Callable[["QNetworkReply", "QNetworkReply.NetworkError"], None]] = None,
download_progress_callback: Optional[Callable[[int, int], None]] = None,
upload_progress_callback: Optional[Callable[[int, int], None]] = None,
timeout: Optional[float] = None,
scope: Optional[HttpRequestScope] = None) -> "HttpRequestData":
return self._createRequest("post", url, headers_dict = headers_dict, data = data,
callback = callback, error_callback = error_callback,
download_progress_callback = download_progress_callback,
upload_progress_callback = upload_progress_callback,
timeout = timeout,
scope = scope)
# Public API for creating an HTTP DELETE request.
# Returns an HttpRequestData instance that represents this request.
def delete(self, url: str,
headers_dict: Optional[Dict[str, str]] = None,
callback: Optional[Callable[["QNetworkReply"], None]] = None,
error_callback: Optional[Callable[["QNetworkReply", "QNetworkReply.NetworkError"], None]] = None,
download_progress_callback: Optional[Callable[[int, int], None]] = None,
upload_progress_callback: Optional[Callable[[int, int], None]] = None,
timeout: Optional[float] = None,
scope: Optional[HttpRequestScope] = None) -> "HttpRequestData":
return self._createRequest("deleteResource", url, headers_dict=headers_dict,
callback=callback, error_callback=error_callback,
download_progress_callback=download_progress_callback,
upload_progress_callback=upload_progress_callback,
timeout=timeout,
scope=scope)
# Public API for aborting a given HttpRequestData. If the request is not pending or in progress, nothing
# will be done.
def abortRequest(self, request: "HttpRequestData") -> None:
with self._request_lock:
# If the request is currently pending, just remove it from the pending queue.
if request in self._request_queue:
self._request_queue.remove(request)
# If the request is currently in progress, abort it.
if request in self._requests_in_progress:
if request.reply is not None and request.reply.isRunning():
request.reply.abort()
Logger.log("d", "%s aborted", request)
@staticmethod
    def readJSON(reply: QNetworkReply) -> Any:
        """ Read a JSON response into a Python object (list, dict, str, etc. depending on the JSON type)
        :return: Python object representing the JSON, or None in case of error
"""
try:
return json.loads(HttpRequestManager.readText(reply))
except json.decoder.JSONDecodeError:
Logger.log("w", "Received invalid JSON: " + str(reply.url()))
return None
@staticmethod
def readText(reply: QNetworkReply) -> str:
"""Decode raw reply bytes as utf-8"""
return bytes(reply.readAll()).decode("utf-8")
@staticmethod
def replyIndicatesSuccess(reply: QNetworkReply, error: Optional["QNetworkReply.NetworkError"] = None) -> bool:
"""Returns whether reply status code indicates success and error is None"""
return error is None and 200 <= reply.attribute(QNetworkRequest.HttpStatusCodeAttribute) < 300
@staticmethod
def safeHttpStatus(reply: Optional[QNetworkReply]):
"""Returns the status code or -1 if there isn't any"""
if reply is None:
return -1
return reply.attribute(QNetworkRequest.HttpStatusCodeAttribute) or -1
@staticmethod
def qt_network_error_name(error: QNetworkReply.NetworkError):
"""String representation of a NetworkError, eg 'ProtocolInvalidOperationError'"""
for k, v in QNetworkReply.__dict__.items():
if v == error:
return k
return "Unknown Qt Network error"
# This function creates a HttpRequestData with the given data and puts it into the pending request queue.
# If no request processing call has been scheduled, it will schedule it too.
# Returns an HttpRequestData instance that represents this request.
def _createRequest(self, http_method: str, url: str,
headers_dict: Optional[Dict[str, str]] = None,
data: Optional[Union[bytes, bytearray]] = None,
callback: Optional[Callable[["QNetworkReply"], None]] = None,
error_callback: Optional[Callable[["QNetworkReply", "QNetworkReply.NetworkError"], None]] = None,
download_progress_callback: Optional[Callable[[int, int], None]] = None,
upload_progress_callback: Optional[Callable[[int, int], None]] = None,
timeout: Optional[float] = None,
scope: Optional[HttpRequestScope] = None ) -> "HttpRequestData":
# Sanity checks
if timeout is not None and timeout <= 0:
raise ValueError("Timeout must be a positive number if provided, but [%s] was given" % timeout)
request = QNetworkRequest(QUrl(url))
# Make sure that Qt handles redirects
if hasattr(QNetworkRequest, "FollowRedirectsAttribute"):
# Patch for Qt 5.6-5.8
request.setAttribute(QNetworkRequest.FollowRedirectsAttribute, True)
if hasattr(QNetworkRequest, "RedirectPolicyAttribute"):
# Patch for Qt 5.9+
request.setAttribute(QNetworkRequest.RedirectPolicyAttribute, True)
# Set headers
if headers_dict is not None:
for key, value in headers_dict.items():
request.setRawHeader(key.encode("utf-8"), value.encode("utf-8"))
if scope is not None:
scope.requestHook(request)
# Generate a unique request ID
request_id = uuid.uuid4().hex
# Create the request data
request_data = HttpRequestData(request_id,
http_method = http_method,
request = request,
data = data,
manager_timeout_callback = self._onRequestTimeout,
callback = callback,
error_callback = error_callback,
download_progress_callback = download_progress_callback,
upload_progress_callback = upload_progress_callback,
timeout = timeout)
with self._request_lock:
self._request_queue.append(request_data)
# Schedule a call to process pending requests in the queue
if not self._process_requests_scheduled:
self.callLater(0, self._processNextRequestsInQueue)
self._process_requests_scheduled = True
return request_data
# For easier debugging, so you know when the call is triggered by the timeout timer.
def _onRequestTimeout(self, request_data: "HttpRequestData") -> None:
        Logger.log("d", "Request [%s] timeout.", request_data)
# Make typing happy
if request_data.reply is None:
return
with self._request_lock:
if request_data not in self._requests_in_progress:
return
request_data.reply.abort()
request_data.is_aborted_due_to_timeout = True
# Processes the next requests in the pending queue. This function will issue as many requests to the QNetworkManager
    # as possible, limited by the value "_max_concurrent_requests". It stops when there are no more pending requests.
def _processNextRequestsInQueue(self) -> None:
# Process all requests until the max concurrent number is hit or there's no more requests to process.
while True:
with self._request_lock:
# Do nothing if there's no more requests to process
if not self._request_queue:
self._process_requests_scheduled = False
return
# Do not exceed the max request limit
if len(self._requests_in_progress) >= self._max_concurrent_requests:
self._process_requests_scheduled = False
return
# Fetch the next request and process
next_request_data = self._request_queue.popleft()
self._processRequest(cast(HttpRequestData, next_request_data))
# Processes the given HttpRequestData by issuing the request using QNetworkAccessManager and moves the
# request into the currently in-progress list.
def _processRequest(self, request_data: "HttpRequestData") -> None:
now = time.time()
# Get the right http_method function and prepare arguments.
method = getattr(self._network_manager, request_data.http_method)
args = [request_data.request]
if request_data.data is not None:
args.append(request_data.data)
# Issue the request and add the reply into the currently in-progress requests set
reply = method(*args)
request_data.reply = reply
# Connect callback signals
reply.error.connect(lambda err, rd = request_data: self._onRequestError(rd, err), type = Qt.QueuedConnection)
reply.finished.connect(lambda rd = request_data: self._onRequestFinished(rd), type = Qt.QueuedConnection)
# Only connect download/upload progress callbacks when necessary to reduce CPU usage.
if request_data.download_progress_callback is not None or request_data.timeout is not None:
reply.downloadProgress.connect(request_data.onDownloadProgressCallback, type = Qt.QueuedConnection)
if request_data.upload_progress_callback is not None or request_data.timeout is not None:
reply.uploadProgress.connect(request_data.onUploadProgressCallback, type = Qt.QueuedConnection)
with self._request_lock:
self._requests_in_progress.add(request_data)
request_data.setStartTime(now)
def _onRequestError(self, request_data: "HttpRequestData", error: "QNetworkReply.NetworkError") -> None:
        error_string = None
        reply_body = None
        if request_data.reply is not None:
            error_string = request_data.reply.errorString()
            # Use peek() to retrieve the reply's body instead of readAll(): peek() doesn't consume the content.
            reply_body = request_data.reply.peek(request_data.reply.bytesAvailable())
        if error in (QNetworkReply.UnknownNetworkError, QNetworkReply.HostNotFoundError):
            self._setInternetReachable(False)
            # The manager doesn't always seem able to recover from a total loss of network access, so re-create it.
            self._network_manager = QNetworkAccessManager(self)
        Logger.log("d", "%s got a QNetworkReply error %s. The server returned: %s", request_data, error_string, reply_body)
with self._request_lock:
# Safeguard: make sure that we have the reply in the currently in-progress requests set
if request_data not in self._requests_in_progress:
# TODO: ERROR, should not happen
Logger.log("e", "%s not found in the in-progress set", request_data)
pass
else:
# Disconnect callback signals
if request_data.reply is not None:
if request_data.download_progress_callback is not None:
request_data.reply.downloadProgress.disconnect(request_data.onDownloadProgressCallback)
if request_data.upload_progress_callback is not None:
request_data.reply.uploadProgress.disconnect(request_data.onUploadProgressCallback)
request_data.setDone()
self._requests_in_progress.remove(request_data)
# Schedule the error callback if there is one
if request_data.error_callback is not None:
self.callLater(0, request_data.error_callback, request_data.reply, error)
# Continue to process the next request
self._processNextRequestsInQueue()
def _onRequestFinished(self, request_data: "HttpRequestData") -> None:
# See https://doc.qt.io/archives/qt-5.10/qnetworkreply.html#abort
# Calling QNetworkReply.abort() will also trigger finished(), so we need to know if a request was finished or
# aborted. This can be done by checking if the error is QNetworkReply.OperationCanceledError. If a request was
# aborted due to timeout, the request's HttpRequestData.is_aborted_due_to_timeout will be set to True.
#
    # We do nothing if the request was aborted or an error was detected, because an error callback will also
# be triggered by Qt.
reply = request_data.reply
if reply is not None:
reply_error = reply.error() # error() must only be called once
if reply_error != QNetworkReply.NoError:
if reply_error == QNetworkReply.OperationCanceledError:
Logger.log("d", "%s was aborted, do nothing", request_data)
# stop processing for any kind of error
return
# No error? Internet is reachable
self._setInternetReachable(True)
if self._enable_request_benchmarking:
time_spent = None # type: Optional[float]
if request_data.start_time is not None:
time_spent = time.time() - request_data.start_time
Logger.log("d", "Request [%s] finished, took %s seconds, pending for %s seconds",
request_data, time_spent, request_data.pending_time)
with self._request_lock:
# Safeguard: make sure that we have the reply in the currently in-progress requests set.
if request_data not in self._requests_in_progress:
# This can happen if a request has been aborted. The finished() signal will still be triggered at the
# end. In this case, do nothing with this request.
Logger.log("e", "%s not found in the in-progress set", request_data)
else:
# Disconnect callback signals
if reply is not None:
# Even after the request was successfully finished, an error may still be emitted if
# the network connection is lost seconds later. Bug in Qt? Fixes CURA-7349
reply.error.disconnect()
if request_data.download_progress_callback is not None:
reply.downloadProgress.disconnect(request_data.onDownloadProgressCallback)
if request_data.upload_progress_callback is not None:
reply.uploadProgress.disconnect(request_data.onUploadProgressCallback)
request_data.setDone()
self._requests_in_progress.remove(request_data)
# Schedule the callback if there is one
if request_data.callback is not None:
self.callLater(0, request_data.callback, reply)
# Continue to process the next request
self._processNextRequestsInQueue()
def _setInternetReachable(self, reachable: bool):
if reachable != self._is_internet_reachable:
self._is_internet_reachable = reachable
self.internetReachableChanged.emit(reachable)
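# ----------------------------------------------------------------------------------------------------------------
# Illustrative usage (not part of the original module): a minimal sketch of issuing a GET request through the
# manager. The URL, headers and callback bodies below are made-up examples, not Ultimaker code.
#
#   def _on_finished(reply: "QNetworkReply") -> None:
#       print(HttpRequestManager.readJSON(reply))
#
#   def _on_error(reply: "QNetworkReply", error: "QNetworkReply.NetworkError") -> None:
#       Logger.log("w", "Request failed: %s", HttpRequestManager.qt_network_error_name(error))
#
#   manager = HttpRequestManager.getInstance()
#   request_data = manager.get("https://example.com/api/status",
#                              headers_dict = {"Accept": "application/json"},
#                              callback = _on_finished,
#                              error_callback = _on_error,
#                              timeout = 10)
#   # The returned HttpRequestData can later be cancelled with manager.abortRequest(request_data).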
| lgpl-3.0 | -6,409,155,076,828,808,000 | 52.991323 | 135 | 0.641583 | false | 4.630698 | false | false | false |
vongrippen/mempy-celery | main2.py | 1 | 1433 | from flask import Flask, request, render_template, redirect
from flask import send_from_directory
from mongoengine import Document, StringField, DateTimeField, connect
import os
import datetime
import tasks
app = Flask(__name__)
app.debug = True
class Entry(Document):
name = StringField()
zip = StringField()
timestamp = DateTimeField()
@app.route("/", methods=["GET", ])
def index():
"""List all the entries."""
all_entries = Entry.objects
return render_template("index.html", entries=all_entries)
@app.route("/new", methods=["GET", "POST", ])
def sign():
"""Allow users to create new entries."""
if request.method == "GET":
return render_template("new.html")
else:
the_zip = request.form["the_zip"]
current_time = datetime.datetime.now()
entry = Entry(
zip=the_zip,
timestamp=current_time
)
entry.save()
tasks.lookup.delay(entry.id)
        return redirect("/")  # Redirect after POST is good behavior!
@app.route("/styles/<path:filename>")
def styles(filename):
    """Allow Flask to serve our CSS files."""
return send_from_directory("styles", filename)
if __name__ == "__main__":
host = "localhost"
port = int(os.getenv("PORT", 5000))
if port != 5000:
host = "0.0.0.0"
else:
connect("mempyflaskcelery") # A MongoDB connection
app.run(port=port, host=host)
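# ----------------------------------------------------------------------------
# Illustrative sketch (not part of this file): the "tasks" module imported above
# is not included in this record. A minimal Celery task matching the call
# tasks.lookup.delay(entry.id) might look like the following; the broker URL and
# the body of lookup() are assumptions, not the project's actual code.
#
#   from celery import Celery
#   celery = Celery("tasks", broker="mongodb://localhost:27017/jobs")
#
#   @celery.task
#   def lookup(entry_id):
#       # Fetch the Entry by id and enrich it, e.g. resolve the zip code to a location.
#       pass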
| mit | 4,270,123,599,678,727,000 | 23.706897 | 69 | 0.624564 | false | 3.852151 | false | false | false |
bsmedberg/socorro | socorro/lib/transform_rules.py | 1 | 13761 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import re
import configman
import collections
import inspect
#------------------------------------------------------------------------------
# support methods
# a regular expression that will parse out all pairs in the form:
# a=b, c=d, e=f
kw_list_re = re.compile('([^ =]+) *= *("[^"]*"|[^ ]*)')
def kw_str_parse(a_string):
"""convert a string in the form 'a=b, c=d, e=f' to a dict"""
try:
return dict((k, eval(v.rstrip(',')))
for k, v in kw_list_re.findall(a_string))
except (AttributeError, TypeError):
if isinstance(a_string, collections.Mapping):
return a_string
return {}
#==============================================================================
class TransformRule(object):
"""a pairing of two functions with default parameters to be used as
transformation rule."""
#--------------------------------------------------------------------------
def __init__(self, predicate,
predicate_args,
predicate_kwargs,
action,
action_args,
action_kwargs):
"""construct a new predicate/action rule pair.
input parameters:
predicate - the name of a function to serve as a predicate. The
function must accept two dicts followed by any number
of constant args or kwargs. Alternatively, this could
be a classname for a class that has a method called
'predicate' with the aforementioned characteristics
predicate_args - arguments to be passed on to the predicate
function in addition to the two required dicts.
predicate_kwargs - kwargs to be passed on to the predicate
function in addition to the two required dicts.
action - the name of a function to be run if the predicate returns
True. The method must accept two dicts followed by
any number of args or kwargs. Alternatively, this could
be a classname for a class that has a method called
'action' with the aforementioned characteristics
action_args - arguments to be passed on to the action function
in addition to the two required dicts
action_kwargs - kwargs to be passed on to the action function in
addition to the two required dicts
"""
try:
self.predicate = configman.converters.class_converter(predicate)
except TypeError:
# conversion failed, let's assume it was already function or a
# callable object
self.predicate = predicate
if inspect.isclass(self.predicate):
# the predicate is a class, instantiate it and set the predicate
# function to the object's 'predicate' method
self._predicitate_implementation = self.predicate()
self.predicate = self._predicitate_implementation.predicate
else:
self._predicitate_implementation = type(self.predicate)
try:
if predicate_args in ('', None):
self.predicate_args = ()
elif isinstance(predicate_args, tuple):
self.predicate_args = predicate_args
else:
self.predicate_args = tuple([eval(x.strip())
for x in predicate_args.split(',')])
except AttributeError:
self.predicate_args = ()
self.predicate_kwargs = kw_str_parse(predicate_kwargs)
try:
self.action = configman.class_converter(action)
except TypeError:
# the conversion failed, let's assume that the action was passed in
# as something callable.
self.action = action
if inspect.isclass(self.action):
# the action is actually a class, go on and instantiate it, then
# assign the 'action' to be the object's 'action' method
if self._predicitate_implementation.__class__ is self.action:
# if the predicate and the action are implemented in the same
# class, only instantiate one copy.
self._action_implementation = self._predicitate_implementation
else:
self._action_implementation = self.action()
self.action = self._action_implementation.action
try:
if action_args in ('', None):
self.action_args = ()
elif isinstance(action_args, tuple):
self.action_args = action_args
else:
self.action_args = tuple([eval(x.strip())
for x in action_args.split(',')])
except AttributeError:
self.action_args = ()
self.action_kwargs = kw_str_parse(action_kwargs)
#--------------------------------------------------------------------------
@staticmethod
    def function_invocation_proxy(fn, proxy_args, proxy_kwargs):
        """execute the function if it is one, else evaluate fn as a boolean
and return that value.
Sometimes rather than providing a predicate, we just give the value of
True. This is shorthand for writing a predicate that always returns
true."""
try:
return fn(*proxy_args, **proxy_kwargs)
except TypeError:
return bool(fn)
#--------------------------------------------------------------------------
def act(self, *args, **kwargs):
"""gather a rules parameters together and run the predicate. If that
returns True, then go on and run the action function
returns:
a tuple indicating the results of applying the predicate and the
action function:
(False, None) - the predicate failed, action function not run
(True, True) - the predicate and action functions succeeded
(True, False) - the predicate succeeded, but the action function
failed"""
pred_args = tuple(args) + tuple(self.predicate_args)
pred_kwargs = kwargs.copy()
pred_kwargs.update(self.predicate_kwargs)
if self.function_invocation_proxy(self.predicate,
pred_args,
pred_kwargs):
act_args = tuple(args) + tuple(self.action_args)
act_kwargs = kwargs.copy()
act_kwargs.update(self.action_kwargs)
bool_result = self.function_invocation_proxy(self.action, act_args,
act_kwargs)
return (True, bool_result)
else:
return (False, None)
#--------------------------------------------------------------------------
def __eq__(self, another):
if isinstance(another, TransformRule):
return self.__dict__ == another.__dict__
else:
return False
#==============================================================================
class TransformRuleSystem(object):
"""A collection of TransformRules that can be applied together"""
#--------------------------------------------------------------------------
def __init__(self):
self.rules = list()
#--------------------------------------------------------------------------
def load_rules(self, an_iterable):
"""cycle through a collection of Transform rule tuples loading them
into the TransformRuleSystem"""
self.rules = [TransformRule(*x) for x in an_iterable]
#--------------------------------------------------------------------------
def append_rules(self, an_iterable):
"""add rules to the TransformRuleSystem"""
self.rules.extend(TransformRule(*x) for x in an_iterable)
#--------------------------------------------------------------------------
def apply_all_rules(self, *args, **kwargs):
"""cycle through all rules and apply them all without regard to
success or failure
returns:
True - since success or failure is ignored"""
# print 'args:', args, 'kwargs:', kwargs
for x in self.rules:
x.act(*args, **kwargs)
return True
#--------------------------------------------------------------------------
def apply_until_action_succeeds(self, *args, **kwargs):
"""cycle through all rules until an action is run and succeeds
returns:
True - if an action is run and succeeds
False - if no action succeeds"""
for x in self.rules:
predicate_result, action_result = x.act(*args, **kwargs)
if action_result:
return True
return False
#--------------------------------------------------------------------------
def apply_until_action_fails(self, *args, **kwargs):
"""cycle through all rules until an action is run and fails
returns:
True - an action ran and it failed
False - no action ever failed"""
for x in self.rules:
predicate_result, action_result = x.act(*args, **kwargs)
if not action_result:
return True
return False
#--------------------------------------------------------------------------
def apply_until_predicate_succeeds(self, *args, **kwargs):
"""cycle through all rules until a predicate returns True
returns:
True - an action ran and it succeeded
False - an action ran and it failed
None - no predicate ever succeeded"""
for x in self.rules:
predicate_result, action_result = x.act(*args, **kwargs)
if predicate_result:
return action_result
return None
#--------------------------------------------------------------------------
def apply_until_predicate_fails(self, *args, **kwargs):
"""cycle through all rules until a predicate returns False
returns:
False - a predicate ran and it failed
None - no predicate ever failed"""
for x in self.rules:
predicate_result, action_result = x.act(*args, **kwargs)
if not predicate_result:
return False
return None
#------------------------------------------------------------------------------
# Useful rule predicates and actions
#------------------------------------------------------------------------------
# (True, '', '', copy_key_value, '', 'source_key=sally, destination_key=fred')
def copy_value_action(source, destination,
source_key=None, destination_key=None):
"""copy a key from a mapping source to a mapping destination"""
destination[destination_key] = source[source_key]
#------------------------------------------------------------------------------
# (True, '', '',
# format_new_value, '', 'destination_key='Version', format_str=%(Version)sesr'
# )
def format_new_value_action(source, destination, destination_key='',
format_str=''):
"""replace a mapping destination with a string formatted from the
mapping source.
parameters:
source - a mapping to use as a source
destination - a mapping to use as the destination
destination_key - the key in the destination to insert/replace
format_str - a string in standard Python format form"""
destination[destination_key] = format_str % source
# (eq_constant_predicate, '', 'source_key="fred", value="wilma"', ...)
#------------------------------------------------------------------------------
def eq_constant_predicate(source, destination, source_key='', value=''):
"""a predicate to test equality between a source key and a constant
parameters:
source - the source of the value to test
destination - not used
source_key - the key into the source to use for the test
value - the constant to check for equality"""
return source[source_key] == value
# (eq_key_predicate, '', 'left_mapping_key="fred", right_mapping_key="wilma"',
# ...)
#------------------------------------------------------------------------------
def eq_key_predicate(left_mapping, right_mapping, left_mapping_key='',
right_mapping_key=''):
"""a predicate to test equality between a left mapping key and a
right mapping key
parameters:
left_mapping - the mapping containing the first value to test
right_mapping - the mapping containing the second value
left_mapping_key - the key into the source for the first value
right_mapping_key - the key into the second data source"""
return left_mapping[left_mapping_key] == right_mapping[right_mapping_key]
# (is_not_null_predicate, '', 'key="fred",
# ...)
#------------------------------------------------------------------------------
def is_not_null_predicate(source, other, key=''):
"""a predicate that converts the key'd source to boolean.
parameters:
source - the mapping containing the value to test
other - unused
key - the key into the source for the first value"""
try:
return bool(source[key])
except KeyError:
return False
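#------------------------------------------------------------------------------
# Illustrative usage (not part of the original module): a minimal, runnable
# sketch of loading one rule into a TransformRuleSystem and applying it. The
# dicts and key names below are invented for the example.
if __name__ == '__main__':
    source = {'fred': 'wilma', 'Version': '24.0'}
    destination = {}
    rules = TransformRuleSystem()
    rules.load_rules([
        # predicate: source['fred'] == 'wilma'
        # action: copy source['Version'] into destination['Version']
        (eq_constant_predicate, '', 'source_key="fred", value="wilma"',
         copy_value_action, '', 'source_key="Version", destination_key="Version"'),
    ])
    rules.apply_all_rules(source, destination)
    print(destination)  # expected: {'Version': '24.0'}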
| mpl-2.0 | -822,367,451,318,281,900 | 42.273585 | 79 | 0.51864 | false | 5.228343 | true | false | false |
jstarc/deep_reasoning | old_experiments.py | 1 | 6927 | # -*- coding: utf-8 -*-
"""
Created on Mon Sep 28 13:43:55 2015
@author: Janez
"""
import sys
sys.path.append('../keras')
import load_data
import models
import misc
import paraphrase
import numpy as np
import itertools
import os
if __name__ == "__main__":
train, dev, test = load_data.load_all_snli_datasets('data/snli_1.0/')
glove = load_data.import_glove('data/snli_vectors.txt')
for ex in train+dev:
load_data.load_word_vecs(ex[0] + ex[1], glove)
load_data.load_word_vec('EOS', glove)
wi = load_data.WordIndex(glove)
def grid_experiments(train, dev, glove, embed_size = 300, hidden_size = 100):
lr_vec = [0.001, 0.0003, 0.0001]
dropout_vec = [0.0, 0.1, 0.2]
reg_vec = [0.0, 0.001, 0.0003, 0.0001]
for params in itertools.product(lr_vec, dropout_vec, reg_vec):
filename = 'lr' + str(params[0]).replace('.','') + '_drop' + str(params[1]).replace('.','') + '_reg' + str(params[2]).replace('.','')
print 'Model', filename
model = models.init_model(embed_size, hidden_size, params[0], params[1], params[2])
models.train_model(train, dev, glove, model, 'models/' + filename)
def test_model2(model, dev, glove):
from misc import predict_example
tp = 0
for ex in dev:
probs = predict_example(" ".join(ex[0]), " ".join(ex[1]), model, glove)
label = load_data.LABEL_LIST[np.argmax(probs)]
if label == ex[2]:
tp +=1
return tp / float(len(dev))
def test_all_models(dev, test, glove, folder = 'models/'):
files = os.listdir(folder)
extless = set([file.split('.')[0] for file in files if os.path.isfile(file)]) - set([''])
epoch_less = set([file.split('~')[0] for file in extless])
for model_short in epoch_less:
if model_short in extless:
modelname = model_short
else:
same_exper = [m for m in extless if m.startswith(model_short)]
epoch_max = max([int(file.split('~')[1]) for file in same_exper])
modelname = model_short + '~' + str(epoch_max)
print modelname
model = models.load_model(folder + modelname)
dev_acc = models.test_model(model, dev, glove)
test_acc = models.test_model(model, test, glove)
print "Dev:", '{0:.2f}'.format(dev_acc * 100), "Test_acc:", '{0:.2f}'.format(test_acc * 100)
print
def accuracy_for_subset(y_pred, y_gold, subset):
pred = y_pred[subset]
gold = y_gold[subset]
return np.sum(np.argmax(pred, axis=1) == np.argmax(gold, axis=1)) / float(len(gold))
def augmented_dataset(glove, dataset, ppdb):
new_examples = []
for ex in dataset:
new_examples += augment_example(glove, ex, ppdb)
return new_examples
def augment_example(glove, example, ppdb):
new_examples = []
for word in set(example[0] + example[1]):
if word in ppdb:
for rep in ppdb[word]:
if word in glove and rep in glove:
new_examples.append(make_new_ex(example, word, rep))
return new_examples
def make_new_ex(example, original, replacement):
premise = [replacement if word == original else word for word in example[0]]
hypo = [replacement if word == original else word for word in example[1]]
return (premise, hypo, example[2])
def test_augmentation(glove, dev, ppdb_file):
ppdb = paraphrase.load_parap(ppdb_file)
aug = augmented_dataset(glove, dev, ppdb)
return aug
def parapharse_models(glove, train, dev, ppdb_file):
ppdb = paraphrase.load_parap(ppdb_file)
aug = augmented_dataset(glove, train, ppdb)
train_aug = train + aug
models.train_model(train_aug, dev, glove, model_filename = 'models/train_aug')
models.train_model(train, dev, glove, model_filename = 'models/train_noaug')
def tune_model(observed_example, train_example, model, glove):
class_arg = load_data.LABEL_LIST.index(observed_example[2])
prem = " ".join(observed_example[0])
hypo = " ".join(observed_example[1])
print prem, hypo, observed_example[2], class_arg
for i in range(30):
probs = misc.predict_example(prem, hypo, model, glove)[0]
print i, probs
if probs.argmax() == class_arg:
break
models.update_model_once(model, glove, [train_example])
def generate_tautologies(dataset):
unique = set()
result = []
for ex in dataset:
premise = " ".join(ex[0])
if premise not in unique:
result.append((ex[0], ex[0], 'entailment'))
unique.add(premise)
return result
def generate_contradictions(dataset):
result = []
for ex in dataset:
if ex[2] == 'contradiction':
result.append((ex[1],ex[0],ex[2]))
return result
def generate_neutral(dataset):
result = []
for ex in dataset:
if ex[2] == 'entailment':
result.append((ex[1],ex[0],'neutral'))
return result
def generate_all(dataset):
return generate_tautologies(dataset) + generate_contradictions(dataset) + generate_neutral(dataset)
def unknown_words_analysis(train, dev):
train_words = set.union(*[set(ex[0]+ex[1]) for ex in train])
indices = [[],[]]
for i in range(len(dev)):
diff = len(set(dev[i][0] + dev[i][1]) - train_words)
if diff == 0:
indices[0].append(i)
else:
indices[1].append(i)
return indices
def color_analysis(dev):
COLORS = set(['black', 'blue', 'orange', 'white', 'yellow', 'green', 'pink', 'purple', 'red', 'brown', 'gray', 'grey'])
indices = [[],[]]
for i in range(len(dev)):
diff = len(set(dev[i][0] + dev[i][1]) & COLORS)
if diff == 0:
indices[0].append(i)
else:
indices[1].append(i)
return indices
def mixture_experiments(train, dev, glove, splits = 5):
for i in range(splits):
model_name = 'mixture' + str(i)
print 'Model', model_name
model = models.init_model()
div = len(train) / splits
models.train_model(train[:i*div] + train[(i+1)*div:splits*div], dev, glove, model, 'models/' + model_name)
def extended_tautologies(train, dev, glove):
augment_data = generate_all(train)
from random import shuffle
shuffle(augment_data)
augment_weight = [0, 0.05, 0.15, 0.5]
for w in augment_weight:
new_train = train + augment_data[:int(len(train)*w)]
        w_str = str(w).replace('.','')
model = models.init_model()
models.train_model(new_train, dev, glove, model = model, model_dir = 'models/aug' + w_str)
def test_tautologies(train, dev, glove, paths = ['aug0','aug005','aug015','aug05']):
testsets = [dev, generate_tautologies(dev), generate_contradictions(dev), generate_neutral(dev)]
names = ['dev' , 'ent', 'contr' ,'neu']
for path in paths:
print path
model_path = misc.best_model_path('models/' + path)
model = models.load_model(model_path)
accs = [models.test_model(model, dataset, glove) for dataset in testsets]
for name, dataset, acc in zip (names, testsets, accs):
print name, acc, len(dataset)
| mit | -9,217,211,386,601,654,000 | 32.955882 | 134 | 0.623647 | false | 3.013049 | true | false | false |
bcgov/gwells | app/backend/aquifers/migrations/0030_vertical_aquifer_extents.py | 1 | 2338 | # Generated by Django 2.2.10 on 2020-02-13 22:29
from decimal import Decimal
import django.contrib.gis.db.models.fields
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import gwells.db_comments.model_mixins
class Migration(migrations.Migration):
dependencies = [
('wells', '0108_auto_20200213_1741'),
('aquifers', '0029_aquifer_area_20200206_1651'),
]
operations = [
migrations.CreateModel(
name='VerticalAquiferExtent',
fields=[
('create_user', models.CharField(max_length=60)),
('create_date', models.DateTimeField(default=django.utils.timezone.now)),
('update_user', models.CharField(max_length=60)),
('update_date', models.DateTimeField(default=django.utils.timezone.now)),
('id', models.AutoField(db_column='vertical_aquifer_extent_id', primary_key=True, serialize=False, verbose_name='VerticalAquiferExtent Resource Identifier')),
('geom', django.contrib.gis.db.models.fields.PointField(blank=False, null=False, srid=4326, verbose_name="Geo-referenced location of aquifer's depth")),
('start', models.DecimalField(blank=False, db_column='vertical_aquifer_extent_from', decimal_places=2, max_digits=7, null=False, validators=[django.core.validators.MinValueValidator(Decimal('0.00'))], verbose_name='From')),
('end', models.DecimalField(blank=True, db_column='vertical_aquifer_extent_to', decimal_places=2, max_digits=7, null=True, validators=[django.core.validators.MinValueValidator(Decimal('0.01'))], verbose_name='To')),
('aquifer', models.ForeignKey(blank=False, db_column='aquifer_id', null=False, on_delete=django.db.models.deletion.PROTECT, to='aquifers.Aquifer')),
('well', models.ForeignKey(blank=True, db_column='well_tag_number', null=True, on_delete=django.db.models.deletion.PROTECT, to='wells.Well')),
],
options={
'verbose_name_plural': 'VerticalAquiferExtent',
'db_table': 'vertical_aquifer_extents',
'ordering': ['start'],
},
bases=(models.Model, gwells.db_comments.model_mixins.DBComments),
),
]
| apache-2.0 | 8,714,971,717,302,226,000 | 56.02439 | 239 | 0.655689 | false | 3.664577 | false | false | false |
ElbaKramer/gpss-research | experiments/2014-02-18-GPSS-add-pl2.py | 4 | 1774 | Experiment(description='PL2 empiricism',
data_dir='../data/tsdlr-renamed/',
max_depth=10,
random_order=False,
k=1,
debug=False,
local_computation=False,
n_rand=9,
sd=2,
jitter_sd=0.1,
max_jobs=400,
verbose=False,
make_predictions=False,
skip_complete=True,
results_dir='../results/2014-02-18-GPSS-add-pl2/',
iters=250,
base_kernels='SE,Per,Lin,Const,Noise',
random_seed=2,
period_heuristic=3,
max_period_heuristic=5,
period_heuristic_type='min',
subset=True,
subset_size=250,
full_iters=10,
bundle_size=5,
additive_form=True,
mean='ff.MeanZero()', # Starting mean
kernel='ff.NoiseKernel()', # Starting kernel
lik='ff.LikGauss(sf=-np.Inf)', # Starting likelihood
score='pl2',
search_operators=[('A', ('+', 'A', 'B'), {'A': 'kernel', 'B': 'base'}),
('A', ('*', 'A', 'B'), {'A': 'kernel', 'B': 'base-not-const'}),
('A', ('*-const', 'A', 'B'), {'A': 'kernel', 'B': 'base-not-const'}),
('A', 'B', {'A': 'kernel', 'B': 'base'}),
('A', ('CP', 'd', 'A'), {'A': 'kernel', 'd' : 'dimension'}),
('A', ('CW', 'd', 'A'), {'A': 'kernel', 'd' : 'dimension'}),
('A', ('B', 'd', 'A'), {'A': 'kernel', 'd' : 'dimension'}),
('A', ('BL', 'd', 'A'), {'A': 'kernel', 'd' : 'dimension'}),
('A', ('None',), {'A': 'kernel'})]) | mit | 7,466,689,904,000,813,000 | 44.512821 | 98 | 0.390643 | false | 3.598377 | false | true | false |
PPKE-Bioinf/consensx.itk.ppke.hu | consensx/graph/correl_graph.py | 1 | 1656 | import numpy as np
import matplotlib.pyplot as plt
plt.switch_backend('Agg')
def correl_graph(my_path, calced, experimental, graph_name):
"""X axis -> experimental values, Y axis -> calculated values
"calced" is a dict containing values for residues (as keys)
"experimental" is a list containing STR record objects"""
min_calc = min(calced.values())
max_calc = max(calced.values())
exp_values = []
for record in experimental:
exp_values.append(record.value)
min_exp = min(exp_values)
max_exp = max(exp_values)
miny = min(min_calc, min_exp) # get minimum value
maxy = max(max_calc, max_exp) # get maximum value
exp_line, calc_line = [], []
for i, j in enumerate(calced.keys()): # fetch data from arguments
calc = calced[j]
exp = experimental[i].value
exp_line.append(exp)
calc_line.append(calc)
diag = []
margin = int(abs(miny - maxy) * 0.05)
    # use tighter margins for narrower value ranges (check the narrowest range first)
    if abs(miny - maxy) < 1:
        margin = 0
    elif abs(miny - maxy) < 2:
        margin = 0.01
    elif abs(miny - maxy) < 10:
        margin = 0.3
maxy += margin
miny -= margin
for i in np.arange(miny, maxy * 1.42, 0.1): # draw graph diagonal
diag.append(i)
plt.figure(figsize=(6, 5), dpi=80)
plt.plot(diag, diag, linewidth=2.0, color='#FD6C6C', alpha=.7)
plt.plot(exp_line, calc_line, color='#027A8B', marker='o', linestyle='')
plt.axis([miny, maxy, miny, maxy])
plt.xlabel('experimental')
plt.ylabel('calculated')
plt.tight_layout(pad=1.08)
plt.savefig(my_path + "/" + graph_name, format="svg", transparent=True)
plt.close()
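# Illustrative usage (not part of the original module): a minimal sketch of how
# correl_graph() can be called. The SimpleRecord stand-in only mimics the real
# STR record objects in that it exposes a ".value" attribute; the numbers and
# the output path are invented for the example.
if __name__ == "__main__":
    class SimpleRecord(object):
        def __init__(self, value):
            self.value = value

    calced = {1: 0.92, 2: 1.10, 3: 0.75}  # residue -> calculated value
    experimental = [SimpleRecord(v) for v in (0.95, 1.05, 0.80)]  # same order as calced keys
    correl_graph(".", calced, experimental, "example_correl.svg")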
| mit | -5,308,761,669,135,066,000 | 28.052632 | 76 | 0.608696 | false | 3.196911 | false | false | false |
rustychris/stompy | stompy/spatial/algorithms.py | 1 | 1107 | import shapely.ops
from shapely import geometry
def cut_polygon(poly,line):
# slice the exterior separately from interior, then recombine
ext_sliced=poly.exterior.union( line )
ext_poly=geometry.Polygon(poly.exterior)
int_sliced=[ p.union(line)
for p in poly.interiors ]
ext_parts, _dangles,_cuts,_invalids = shapely.ops.polygonize_full( ext_sliced )
ext_parts=list(ext_parts) # so we can update entries
# It's possible to introduce some new area here - places where the cut line
# goes outside the exterior but forms a loop with the exterior.
ext_parts=[p_ext
for p_ext in ext_parts
if p_ext.intersection(ext_poly).area / p_ext.area > 0.99 ]
for p in int_sliced:
int_parts, _dangles,_cuts,_invalids = shapely.ops.polygonize_full( p )
# remove from an ext_part if there's overlap
for p_int in int_parts:
for i_ext, p_ext in enumerate(ext_parts):
if p_ext.intersects(p_int):
ext_parts[i_ext] = p_ext.difference(p_int)
return ext_parts
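# Illustrative usage (not part of the original module): cut a unit square with a
# vertical line through its middle. The coordinates are invented for the example.
if __name__ == '__main__':
    square = geometry.Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    cut_line = geometry.LineString([(0.5, -0.1), (0.5, 1.1)])
    halves = cut_polygon(square, cut_line)
    print([round(p.area, 3) for p in halves])  # expect two pieces of ~0.5 each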
| mit | -3,559,079,356,101,786,600 | 37.172414 | 83 | 0.635953 | false | 3.459375 | false | false | false |
LPgenerator/django-robokassa | robokassa/migrations/0001_initial.py | 1 | 1475 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='SuccessNotification',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('InvId', models.IntegerField(verbose_name='\u041d\u043e\u043c\u0435\u0440 \u0437\u0430\u043a\u0430\u0437\u0430', db_index=True)),
('OutSum', models.CharField(max_length=15, verbose_name='\u0421\u0443\u043c\u043c\u0430')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='\u0414\u0430\u0442\u0430 \u0438 \u0432\u0440\u0435\u043c\u044f \u043f\u043e\u043b\u0443\u0447\u0435\u043d\u0438\u044f \u0443\u0432\u0435\u0434\u043e\u043c\u043b\u0435\u043d\u0438\u044f')),
],
options={
'verbose_name': '\u0423\u0432\u0435\u0434\u043e\u043c\u043b\u0435\u043d\u0438\u0435 \u043e\u0431 \u0443\u0441\u043f\u0435\u0448\u043d\u043e\u043c \u043f\u043b\u0430\u0442\u0435\u0436\u0435',
'verbose_name_plural': '\u0423\u0432\u0435\u0434\u043e\u043c\u043b\u0435\u043d\u0438\u044f \u043e\u0431 \u0443\u0441\u043f\u0435\u0448\u043d\u044b\u0445 \u043f\u043b\u0430\u0442\u0435\u0436\u0430\u0445 (ROBOKASSA)',
},
bases=(models.Model,),
),
]
| mit | 6,957,487,516,605,604,000 | 53.62963 | 273 | 0.656949 | false | 2.676951 | false | false | false |
leejz/meta-omics-scripts | query_ko_from_rast_annotations.py | 1 | 12027 | #!/usr/bin/env python
"""
--------------------------------------------------------------------------------
Created: Jackson Lee 9/27/14
This script reads in a tab delimited file of annotations and querys the KEGG
REST API to parse back the original KO ortholog of the entry. If the line
contains an EC reference, the script will first query each line in KEGG REST as:
http://rest.kegg.jp/find/genes/each+query+term+and+EC+d.d.d.d
e.g. 1-deoxy-D-xylulose 5-phosphate synthase (EC 2.2.1.7)
http://rest.kegg.jp/find/genes/1-deoxy-D-xylulose+5-phosphate+synthase+2.2.1.7
and save the output in query order with the format:
gmx:100301901 DXS1; 1-deoxy-D-xylulose 5-phosphate synthase 1; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_009G095900g hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_006G159900g hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_003G099600g hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_003G148900g hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_003G287800g hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_003G287900g hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_2g020590 1-deoxy-D-xylulose 5-phosphate synthase; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_3g107740 hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_4g118640 1-deoxy-D-xylulose 5-phosphate synthase; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_8g068300 1-Deoxy-D-xylulose 5-phosphate synthase; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_8g068270 1-Deoxy-D-xylulose 5-phosphate synthase; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_8g068280 1-Deoxy-D-xylulose 5-phosphate synthase; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
This output will then be read in, searched for the exact query term, and any KO numbers in the
string grabbed by regex. These terms are aggregated and the top hit and its score written out.
regex: "1-deoxy-D-xylulose 5-phosphate synthase;" and "EC:2.2.1.7" for "; K\d{5}"
result: K01662
Input file format:
(3R)-hydroxymyristoyl-[ACP] dehydratase (EC 4.2.1.-)
(R)-citramalate synthase (EC 2.3.1.182)
(S)-2-haloacid dehalogenase I (EC 3.8.1.2)
(S)-22C3-di-O-geranylgeranylglyceryl phosphate synthase
(S)-3-O-geranylgeranylglyceryl phosphate synthase
(Y14336) putative extracellular protein containing predicted 35aa signal peptide
1-acyl-sn-glycerol-3-phosphate acyltransferase (EC 2.3.1.51)
1-aminocyclopropane-1-carboxylate deaminase (EC 3.5.99.7)
1-deoxy-D-xylulose 5-phosphate reductoisomerase (EC 1.1.1.267)
1-deoxy-D-xylulose 5-phosphate synthase (EC 2.2.1.7)
Output
a translation table of terms and the KEGG REST output
1-deoxy-D-xylulose 5-phosphate synthase (EC 2.2.1.7)\tK01662\t5\t5
--------------------------------------------------------------------------------
usage: query_ko_from_rast_annotations.py -i in.file -d out.directory -o output.file
"""
#-------------------------------------------------------------------------------
#
#http thread pool code from: http://stackoverflow.com/questions/2632520/what-is-the-fastest-way-to-send-100-000-http-requests-in-python
#-------------------------------------------------------------------------------
#Header - Linkers, Libs, Constants
from string import strip
import os
import re
import collections
from argparse import ArgumentParser, RawDescriptionHelpFormatter
#import requests
from urlparse import urlparse
from threading import Thread
import httplib
import sys
from Queue import Queue
#-------------------------------------------------------------------------------
#function declarations
def doWork():
while not exitapp:
id, urlstring, queryline = q.get()
url = urlparse(urlstring)
if id % 100 == 0:
print 'Query: HTTP Thread: ' + str(id) + ' started.'
try:
conn = httplib.HTTPConnection(url.netloc)
conn.request("GET", url.path)
res = conn.getresponse()
if res.status == 200:
with open(outputdirectory + '/' + str(id) + '.KEGG_REST.txt', 'w') as restfile:
restfile.write(res.read())
restfile.close()
#print 'Thread: ' + str(id) + ' Query: ' + urlstring + ' ..... ' + res.reason + '\n'
searchfile.write(str(id) + '\t' + queryline + '\n')
else:
print 'HTTP error, Thread: ' + str(id) + ' with error: ' + res.reason
logfile.write(str(id) + '\t' + urlstring + '\t' + res.reason + '\n')
raise
except:
print 'Thread: ' + str(id) + '. Error. '
print sys.exc_info()[0]
q.task_done()
#-------------------------------------------------------------------------------
#Body
print "Running..."
if __name__ == '__main__':
parser = ArgumentParser(usage = "query_ko_from_rast_annotations.py -i \
in.file -d out.directory -o output.file",
description=__doc__,
formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("-i", "--input_file", action="store",
dest="inputfilename", help="text input file")
parser.add_argument("-o", "--output_filename", action="store",
dest="outputfilename", help="text output file")
parser.add_argument("-d", "--output_directory", action="store",
dest="outputdirectory", help="text output file")
options = parser.parse_args()
mandatories = ["outputfilename","outputdirectory"]
for m in mandatories:
if not options.__dict__[m]:
print "\nError: Missing Arguments\n"
parser.print_help()
exit(-1)
outputdirectory = options.outputdirectory
ec_regex = '\d\.[\d\-]*\.[\d\-]*\.[\d\-]*'
#allow for only querying the KEGG REST API once
if options.__dict__['inputfilename']:
if not os.path.exists(outputdirectory):
os.makedirs(outputdirectory)
else:
print "\nError: Directory exists!\n"
parser.print_help()
exit(-1)
print "Querying KEGG REST API Service..."
inputfilename = options.inputfilename
api_template = "http://rest.kegg.jp/find/genes/"
infile_list = []
with open(inputfilename,'U') as infile:
infile_list = [line.strip() for line in infile]
infile.close()
# replace all 2C, %3B
infile_list = [line.replace('2C',',') for line in infile_list]
infile_list = [line.replace('%3B',';') for line in infile_list]
urlpool = []
for line in infile_list:
if re.search('EC ' + ec_regex, line) != None:
#format string for search
query = line.strip()
#remove ec
ecnum_list = re.findall('EC ' + ec_regex,query)
ecnum_list = [ecline[3:] for ecline in ecnum_list]
query = re.sub(' \(EC ' + ec_regex + '\)', '', query)
#remove url syntax issues
query = query.replace('+','')
query = query.replace('/',' ')
query = query.replace('@',' ')
query = query.replace(';',' ')
            query = query.replace('  ', ' ')  # collapse double spaces
#query += ' '.join(ecnum_list)
#urlstring = api_template + query
#form url, query, and write file
querylist = filter(None, query.split(' ') + ecnum_list)
urlstring = api_template + '+'.join(querylist)
#catch case of '+-' '+)+' '+(+' and convert url encoding
urlstring = urlstring.replace('+-','+')
#urlstring = urlstring.replace('+)+','+')
#urlstring = urlstring.replace('+(+','+')
#urlstring = urlstring.replace(' ','%20')
urlstring = urlstring.replace('(','%28')
urlstring = urlstring.replace(')','%29')
urlpool.append([urlstring, line])
# print 'Query: ' + urlstring
# r = requests.get(urlstring)
# if r.raise_for_status() == None:
# with open(outputdirectory + '/' + str(i) + '.KEGG_REST.txt', 'w') as restfile:
# restfile.write(r.text)
# restfile.close()
# searchfile.write(str(i) + '\t' + line + '\n')
# else:
# print 'Response error raised. Exiting'
# exit(-1)
#setup threading for http requests and run connections
concurrent = 100
exitapp = False
with open(outputdirectory + '/searchlist.txt', 'w') as searchfile, open(outputdirectory + '/errorlog.txt','w') as logfile:
q = Queue(concurrent * 2)
for i in range(concurrent):
t = Thread(target=doWork)
t.daemon = True
t.start()
try:
for id, urlentry in enumerate(urlpool):
q.put([id] + urlentry)
q.join()
except KeyboardInterrupt:
exitapp = True
sys.exit(1)
logfile.close()
searchfile.close()
logfile.close()
print "Parsing REST files and writing..."
outputfilename = options.outputfilename
outfile = open(outputdirectory + '/' + outputfilename, 'w')
with open(outputdirectory + '/searchlist.txt','U') as searchfile:
for line in searchfile:
i, query = line.strip().split('\t')
#form string for search
ecnum_list = re.findall('EC ' + ec_regex,query)
ecnum_list = [ecline[3:] for ecline in ecnum_list]
#querystring = '\t' + re.sub(' \(EC ' + ec_regex + '\)', '', query)
querystrings = [re.sub(' \(EC ' + ec_regex + '\)', '', querystring).lower() for querystring in re.split(' / | @ |; ', query)]
ecstring = '(EC:' + ' '.join(ecnum_list) + ');'
ko = []
with open(outputdirectory + '/' + str(i) + '.KEGG_REST.txt', 'U') as inrestfile:
for restline in inrestfile:
restline = restline.strip()
#if querystring == 'Chlorophyllide reductase subunit BchZ':
# print querystring, ecstring
# print querystring in restline, ecstring in restline
# if the enzyme search string and the modified ec string and a KEGG KO number are in the rest output, record the KO number
if all(querystring in restline.lower() for querystring in querystrings) and all(ecterm in restline for ecterm in ecnum_list) and re.search(r'; K\d{5}', restline) != None:
ko.append(re.search(r'; K\d{5}', restline).group(0)[2:])
inrestfile.close()
#determine and record the most common KO number and how common it was
counter= collections.Counter(ko)
if len(counter) > 0:
outfile.write(query + '\t' + counter.most_common(1)[0][0] + '\t' + str(counter.most_common(1)[0][1]) + '\t' + str(sum(counter.values())) + '\t' + querystring + '\t' + ecstring + '\n')
else:
outfile.write(query + '\t\n')
outfile.close()
print "Done!"
| mit | 3,400,266,808,481,066,000 | 47.301205 | 199 | 0.55126 | false | 3.42552 | false | false | false |
BrainTech/openbci | obci/utils/test_robot.py | 1 | 1110 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import urllib
class TestRobot(object):
def __init__(self, robot_ip):
self.robot_ip = robot_ip
def head_up(self):
# http://192.168.10.18/rev.cgi?Cmd=nav&action=18&drive=13&speed=5
params = urllib.urlencode({'Cmd': 'nav',
'action': 18,
'drive': 13,
'speed' : 5})
print params
try:
# f = urllib.urlopen("http://" + self.robot_ip + "/rev.cgi?%s" % params)
f = urllib.urlopen("http://192.168.1.9/rev.cgi?Cmd=nav&action=1")
except IOError, e:
print "Connection error: ", str(e)
else:
print f.read()
def parser():
parser = argparse.ArgumentParser(description="Test ROVIO robot!", epilog="(c) 2011, Warsaw University")
parser.add_argument('robot_ip', help='IP address of the ROVIO robot.')
return parser
if __name__ == '__main__':
args = parser().parse_args()
robot = TestRobot(args.robot_ip)
robot.head_up()
| gpl-3.0 | 8,965,823,590,068,635,000 | 29.833333 | 107 | 0.525225 | false | 3.627451 | false | false | false |
webpartners/maildump | maildump/__init__.py | 2 | 1225 | import asyncore
import gevent
from gevent.event import Event
from logbook import Logger
from socketio.server import SocketIOServer
from maildump.db import connect, disconnect, create_tables
from maildump.smtp import smtp_handler, SMTPServer
from maildump.web import app
from maildump.web_realtime import broadcast
log = Logger(__name__)
stopper = Event()
socketio_server = None
def start(http_host, http_port, smtp_host, smtp_port, db_path=None):
global socketio_server
# Webserver
log.notice('Starting web server on http://{0}:{1}'.format(http_host, http_port))
socketio_server = SocketIOServer((http_host, http_port), app,
log='default' if app.debug else None)
socketio_server.start()
# SMTP server
log.notice('Starting smtp server on {0}:{1}'.format(smtp_host, smtp_port))
SMTPServer((smtp_host, smtp_port), smtp_handler)
gevent.spawn(asyncore.loop)
# Database
connect(db_path)
create_tables()
# Wait....
try:
stopper.wait()
except KeyboardInterrupt:
print
else:
log.debug('Received stop signal')
# Clean up
disconnect()
log.notice('Terminating')
def stop():
stopper.set()
| mit | -6,663,160,390,148,330,000 | 25.630435 | 84 | 0.673469 | false | 3.656716 | false | false | false |
iocast/libwebsocket | libwebsocket/websocket/broadcast.py | 1 | 2293 | import struct
import logging
from threading import Thread
from time import sleep
from base import BaseHandler
# WebSocket implementation
class BroadcastHandler(BaseHandler, Thread):
@property
def server(self):
return self._server
@property
def tick_time(self):
return self._tick_time
def __init__(self, server, tick, *args, **kwargs):
super(BroadcastHandler, self).__init__(*args, **kwargs)
self._server = server
self._tick_time = tick
def run(self):
"""handle
>>> help(BaseHandler.handle)
"""
        logging.info("Broadcast every %s" % str(self.tick_time))
# Keep serving broadcast
self.running = True
while self.running:
if len(self.server.connections) > 0:
self.tick()
sleep(self.tick_time/1000.0)
def tick(self):
raise NotImplementedError("Child need to implemet this!")
def sendMessage(self, client, s):
"""
Encode and send a WebSocket message
"""
# Empty message to start with
message = ""
# always send an entire message as one frame (fin)
b1 = 0x80
# in Python 2, strs are bytes and unicodes are strings
if type(s) == unicode:
b1 |= self.server.text
payload = s.encode("UTF8")
elif type(s) == str:
b1 |= self.server.text
payload = s
# Append 'FIN' flag to the message
message += chr(b1)
# never mask frames from the server to the client
b2 = 0
# How long is our payload?
length = len(payload)
if length < 126:
b2 |= length
message += chr(b2)
elif length < (2 ** 16) - 1:
b2 |= 126
message += chr(b2)
l = struct.pack(">H", length)
message += l
else:
l = struct.pack(">Q", length)
b2 |= 127
message += chr(b2)
message += l
# Append payload to message
message += payload
# Send to the client
client.send(str(message))
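    # Worked example (not in the original code): for a 10-byte unmasked text
    # payload the frame built above starts with 0x81 (FIN bit + text opcode),
    # followed by 0x0A (payload length 10), followed by the raw payload bytes.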
| mit | 842,796,525,149,692,300 | 22.397959 | 65 | 0.501526 | false | 4.549603 | false | false | false |
prppedro/mercurium | plugins/aulete.py | 1 | 3498 | from api import send_message, send_photo
import requests
import bs4
def on_msg_received(msg, matches):
url = "http://aulete.com.br/wap/resultado.php"
headers = {'User-Agent': 'Nokia6630/1.0 (2.3.129) SymbianOS/8.0 Series60/2.6 Profile/MIDP-2.0 Configuration/CLDC-1.1'}
data = { 'busca' : str(matches.group(1)).encode("iso-8859-1") }
output = requests.post(url, data, headers=headers)
ensopado = bs4.BeautifulSoup(output.content, "html.parser")
definicao_raw = ensopado.find_all("div", id="definicao")
spans = definicao_raw[0].find_all("span")
definicao = ""
if not spans:
definicao = definicao_raw[0].find(text = True)
#resultado = definicao_raw[0].findAll(text = True)
#for frase in resultado:
# definicao += frase
else:
        # Used for comparison when the entry has more than one sense
cabecoAnterior = False
cabecoCounter = 0
for span in spans:
texto = span.findAll(text = True)
classe = span["class"][0]
# definicao += "```" + str(span['class']) + " = " + str(span.findAll(text = True)) + "```"
            # Handle the case where there is more than one sense
if classe == "cabeco":
if cabecoAnterior is False:
if cabecoCounter > 0:
definicao += "\r\n"
definicao += "*{}*".format(texto[0])
cabecoAnterior = True
cabecoCounter = cabecoCounter + 1
else:
definicao += ":_" + texto[0] + "_ "
cabecoAnterior = False
else:
cabecoAnterior = False
if classe == "sepsil":
                # Small hack to grab the stressed syllable
                # It doesn't actually seem to work quite right... will look into why later
tonica = span.find("em", text = True)
                # Add a space between the term and the syllable separation
definicao += " "
for sil in texto:
if sil == tonica:
definicao += "_{}_".format(sil)
else:
definicao += "{}".format(sil)
            # Not sure what this does, but leaving it here anyway
#if classe == "ort":
                # I think it returns nothing, so convert it just to be safe
                # Otherwise it could blow up and raise an error
                # Also added a line break before the next section
#definicao += "[{}]\r\n".format(str(texto))
                # Ended up disabling this because it really wasn't much use
if classe == "catgram":
definicao += "```{}```\r\n".format(texto[0])
if classe == "numdef":
definicao += " *{}* ".format(texto[0])
if classe == "rubrica" or classe == "regio" or classe == "uso":
definicao += "_{}_ ".format(texto[0])
if classe == "def":
definicao += "{}\r\n".format("".join(texto))
if classe == "achverb":
definicao += "\n_{}_\n".format("".join(texto))
print(definicao)
#parole = definicao_raw[0].find("span", class_="cabeco", text = True)
#parole = parole.find(text = True)
#definicao = "*{}*\nDebug: {}".format(parole, type(parole))
send_message(msg["chat"]["id"], str(definicao))
| mit | 2,658,021,087,867,210,000 | 38.965517 | 122 | 0.516825 | false | 3.258669 | false | false | false |
botswana-harvard/edc-locator | edc_locator/model_mixins/subject_contact_fields_mixin.py | 1 | 2191 | from django.db import models
from django.utils.safestring import mark_safe
from django_crypto_fields.fields import EncryptedCharField, EncryptedTextField
from edc_base.model_validators import CellNumber, TelephoneNumber
from edc_constants.choices import YES_NO
class SubjectContactFieldsMixin(models.Model):
may_call = models.CharField(
max_length=25,
choices=YES_NO,
verbose_name=mark_safe(
            'Has the participant given permission <b>to be contacted by telephone '
'or cell</b> by study staff for follow-up purposes during the study?'))
may_visit_home = models.CharField(
max_length=25,
choices=YES_NO,
verbose_name=mark_safe(
'Has the participant given permission for study '
'staff <b>to make home visits</b> for follow-up purposes?'))
may_sms = models.CharField(
max_length=25,
choices=YES_NO,
null=True,
blank=False,
verbose_name=mark_safe(
'Has the participant given permission <b>to be contacted by SMS</b> '
'by study staff for follow-up purposes during the study?'))
mail_address = EncryptedTextField(
verbose_name='Mailing address ',
max_length=500,
null=True,
blank=True)
physical_address = EncryptedTextField(
verbose_name='Physical address with detailed description',
max_length=500,
blank=True,
null=True,
help_text='')
subject_cell = EncryptedCharField(
verbose_name='Cell number',
validators=[CellNumber, ],
blank=True,
null=True,
help_text='')
subject_cell_alt = EncryptedCharField(
verbose_name='Cell number (alternate)',
validators=[CellNumber, ],
blank=True,
null=True)
subject_phone = EncryptedCharField(
verbose_name='Telephone',
validators=[TelephoneNumber, ],
blank=True,
null=True)
subject_phone_alt = EncryptedCharField(
verbose_name='Telephone (alternate)',
validators=[TelephoneNumber, ],
blank=True,
null=True)
class Meta:
abstract = True
| gpl-2.0 | 6,615,224,671,100,447,000 | 29.430556 | 83 | 0.628937 | false | 4.141777 | false | false | false |
Yubico/yubioath-desktop-dpkg | yubioath/gui/messages.py | 1 | 5776 | # Copyright (c) 2014 Yubico AB
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Additional permission under GNU GPL version 3 section 7
#
# If you modify this program, or any covered work, by linking or
# combining it with the OpenSSL project's OpenSSL library (or a
# modified version of that library), containing parts covered by the
# terms of the OpenSSL or SSLeay licenses, We grant you additional
# permission to convey the resulting work. Corresponding Source for a
# non-source form of such a combination shall include the source code
# for the parts of OpenSSL used as well as that of the covered work.
"""
Strings for Yubico Authenticator.
Note: String names must not start with underscore (_).
"""
organization = "Yubico"
domain = "yubico.com"
app_name = "Yubico Authenticator"
win_title_1 = "Yubico Authenticator (%s)"
about_1 = "About: %s"
copyright = "Copyright © Yubico"
libraries = "Library versions"
version_1 = "Version: %s"
wait = "Please wait..."
error = "Error"
menu_file = "&File"
menu_help = "&Help"
action_about = "&About"
action_add = "&Add..."
action_password = "Set/Change &password"
action_settings = "&Settings"
action_delete = "&Delete"
action_show = "&Show credentials"
action_close = "&Close Window"
action_quit = "&Quit"
password = "Password"
settings = "Settings"
advanced = "Advanced"
search = "Search"
pass_required = "Password required"
remember = "Remember password"
no_key = "Insert a YubiKey..."
key_busy = "YubiKey already in use!"
key_present = "YubiKey found. Reading..."
key_removed = "YubiKey removed"
key_removed_desc = "There was an error communicating with the device!"
ykstd_slots = "YubiKey standard slots"
enable_slot_1 = "Read from slot %d"
n_digits = "Number of digits"
enable_systray = "Show in system tray"
kill_scdaemon = "Kill scdaemon on show"
reader_name = "Card reader name"
no_creds = "No credentials available"
add_cred = "New credential"
cred_name = "Credential name"
cred_key = "Secret key (base32)"
cred_type = "Credential type"
cred_totp = "Time based (TOTP)"
cred_hotp = "Counter based (HOTP)"
algorithm = "Algorithm"
invalid_name = "Invalid name"
invalid_name_desc = "Name must be at least 3 characters"
invalid_key = "Invalid key"
invalid_key_desc = "Key must be base32 encoded"
set_pass = "Set password"
new_pass = "New password (blank for none)"
ver_pass = "Verify new password"
pass_mismatch = "Passwords do not match"
pass_mismatch_desc = "Please enter the same password twice"
touch_title = "Touch required"
touch_desc = "Touch your YubiKey now"
delete_title = "Confirm credential deletion"
delete_desc_1 = """<span>Are you sure you want to delete the credential?</span>
<br>
This action cannot be undone.
<br><br>
<b>Delete credential: %s</b>
"""
slot = "YubiKey slot"
slot_2 = "Slot %d (%s)"
free = "free"
in_use = "in use"
require_touch = "Require touch"
no_slot = "No slot chosen"
no_slot_desc = "Please choose a slot to write the credential to"
overwrite_slot = "Overwrite slot?"
overwrite_slot_desc_1 = "This will overwrite the credential currently " \
"stored in slot %d. This action cannot be undone."
overwrite_entry = "Overwrite entry?"
overwrite_entry_desc = "An entry with this username already exists.\n\nDo " \
"you wish to overwrite it? This action cannot be undone."
qr_scan = "Scan a QR code"
qr_scanning = "Scanning for QR code..."
qr_not_found = "QR code not found"
qr_not_found_desc = "No usable QR code detected. Make sure the QR code is " \
"fully visible on your primary screen and try again."
qr_not_supported = "Credential not supported"
qr_not_supported_desc = "This credential type is not supported for slot " \
"based usage."
qr_invalid_type = "Invalid OTP type"
qr_invalid_type_desc = "Only TOTP and HOTP types are supported."
qr_invalid_digits = "Invalid number of digits"
qr_invalid_digits_desc = "An OTP may only contain 6 or 8 digits."
qr_invalid_algo = "Unsupported algorithm"
qr_invalid_algo_desc = "SHA1 and SHA256 are the only supported OTP " \
"algorithms at this time."
tt_slot_enabled_1 = "Check to calculate TOTP codes using the YubiKey " \
"standard slot %d credential."
tt_num_digits = "The number of digits to show for the credential."
tt_systray = "When checked, display an icon in the systray, and leave the " \
"application running there when closed."
tt_kill_scdaemon = "Kills any running scdaemon process when the window is " \
"shown. This is useful when using this application together with GnuPG " \
"to avoid GnuPG locking the device."
tt_reader_name = "Changes the default smartcard reader name to look for. " \
"This can be used to target a specific YubiKey when multiple are used, " \
"or to target an NFC reader."
ccid_disabled = '<b>CCID (smart card capabilities) is disabled on the ' \
'inserted YubiKey.</b><br><br>Without CCID enabled, you will only be ' \
'able to store 2 credentials.<br><br>' \
'<a href="%s">Learn how to enable CCID</a><br>'
no_space = "No space available"
no_space_desc = "There is not enough space to add another " \
"credential on your device.\n\nTo create free space to add a " \
"new credential, delete those you no longer need."
| gpl-3.0 | 6,331,565,622,873,595,000 | 39.391608 | 79 | 0.723857 | false | 3.429929 | false | false | false |
kivy/plyer | plyer/platforms/win/cpu.py | 1 | 6646 | '''
Module of Windows API for plyer.cpu.
'''
from ctypes import (
c_ulonglong, c_ulong, byref,
Structure, POINTER, Union, windll, create_string_buffer,
sizeof, cast, c_void_p, c_uint32
)
from ctypes.wintypes import (
BYTE, DWORD, WORD
)
from plyer.facades import CPU
KERNEL = windll.kernel32
ERROR_INSUFFICIENT_BUFFER = 0x0000007A
class CacheType:
'''
Win API PROCESSOR_CACHE_TYPE enum.
'''
unified = 0
instruction = 1
data = 2
trace = 3
class RelationshipType:
'''
Win API LOGICAL_PROCESSOR_RELATIONSHIP enum.
'''
processor_core = 0 # logical proc sharing single core
numa_node = 1 # logical proc sharing single NUMA node
cache = 2 # logical proc sharing cache
processor_package = 3 # logical proc sharing physical package
group = 4 # logical proc sharing processor group
all = 0xffff # logical proc info for all groups
class CacheDescriptor(Structure):
'''
Win API CACHE_DESCRIPTOR struct.
'''
_fields_ = [
('Level', BYTE),
('Associativity', BYTE),
('LineSize', WORD),
('Size', DWORD),
('Type', DWORD)
]
class ProcessorCore(Structure):
'''
Win API ProcessorCore struct.
'''
_fields_ = [('Flags', BYTE)]
class NumaNode(Structure):
'''
Win API NumaNode struct.
'''
_fields_ = [('NodeNumber', DWORD)]
class SystemLPIUnion(Union):
'''
Win API SYSTEM_LOGICAL_PROCESSOR_INFORMATION union without name.
'''
_fields_ = [
('ProcessorCore', ProcessorCore),
('NumaNode', NumaNode),
('Cache', CacheDescriptor),
('Reserved', c_ulonglong)
]
class SystemLPI(Structure):
'''
Win API SYSTEM_LOGICAL_PROCESSOR_INFORMATION struct.
'''
_fields_ = [
('ProcessorMask', c_ulong),
('Relationship', c_ulong),
('LPI', SystemLPIUnion)
]
class WinCPU(CPU):
'''
Implementation of Windows CPU API.
'''
@staticmethod
def _countbits(mask):
# make sure the correct ULONG_PTR size is used on 64bit
# https://docs.microsoft.com/en-us/windows/
# desktop/WinProg/windows-data-types
# note: not a pointer per-se, != PULONG_PTR
ulong_ptr = c_ulonglong if sizeof(c_void_p) == 8 else c_ulong
# note: c_ulonglong only on 64bit, otherwise c_ulong
# DWORD == c_uint32
# https://docs.microsoft.com/en-us/windows/
# desktop/WinProg/windows-data-types
lshift = c_uint32(sizeof(ulong_ptr) * 8 - 1)
assert lshift.value in (31, 63), lshift # 32 or 64 bits - 1
lshift = lshift.value
test = 1 << lshift
assert test % 2 == 0, test
count = 0
i = 0
while i <= lshift:
i += 1
# do NOT remove!!!
# test value has to be %2 == 0,
# except the last case where the value is 1,
# so that int(test) == int(float(test))
# and the mask bit is counted correctly
assert test % 2 == 0 or float(test) == 1.0, test
# https://stackoverflow.com/a/1746642/5994041
# note: useful to print(str(bin(int(...)))[2:])
count += 1 if (mask & int(test)) else 0
test /= 2
return count
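    # Illustrative example (not from the original source): for a hypothetical
    # affinity mask 0b1011 three bits are set, so _countbits(0b1011) == 3,
    # i.e. three logical processors belong to that relationship entry.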
def _logprocinfo(self, relationship):
get_logical_process_info = KERNEL.GetLogicalProcessorInformation
# first call with no structure to get the real size of the required
buff_length = c_ulong(0)
result = get_logical_process_info(None, byref(buff_length))
assert not result, result
error = KERNEL.GetLastError()
assert error == ERROR_INSUFFICIENT_BUFFER, error
assert buff_length, buff_length
# create buffer from the real winapi buffer length
buff = create_string_buffer(buff_length.value)
# call again with buffer pointer + the same length as arguments
result = get_logical_process_info(buff, byref(buff_length))
assert result, (result, KERNEL.GetLastError())
# memory size of one LPI struct in the array of LPI structs
offset = sizeof(SystemLPI) # ok
values = {
key: 0 for key in (
'relationship', 'mask',
'L1', 'L2', 'L3'
)
}
for i in range(0, buff_length.value, offset):
slpi = cast(
buff[i: i + offset],
POINTER(SystemLPI)
).contents
if slpi.Relationship != relationship:
continue
values['relationship'] += 1
values['mask'] += self._countbits(slpi.ProcessorMask)
if slpi.LPI.Cache.Level == 1:
values['L1'] += 1
elif slpi.LPI.Cache.Level == 2:
values['L2'] += 1
elif slpi.LPI.Cache.Level == 3:
values['L3'] += 1
return values
def _sockets(self):
# physical CPU sockets (or slots) on motherboard
return self._logprocinfo(
RelationshipType.processor_package
)['relationship']
def _physical(self):
# cores
return self._logprocinfo(
RelationshipType.processor_core
)['relationship']
def _logical(self):
# cores * threads
# if hyperthreaded core -> more than one logical processor
return self._logprocinfo(
RelationshipType.processor_core
)['mask']
def _cache(self):
# L1, L2, L3 cache count
result = self._logprocinfo(
RelationshipType.cache
)
return {
key: result[key]
for key in result
if key in ('L1', 'L2', 'L3')
}
def _numa(self):
# numa nodes
return self._logprocinfo(
RelationshipType.numa_node
)['relationship']
def instance():
'''
Instance for facade proxy.
'''
return WinCPU()
# Resources:
# GetLogicalProcessInformation
# https://msdn.microsoft.com/en-us/library/ms683194(v=vs.85).aspx
# SYSTEM_LOGICAL_PROCESSOR_INFORMATION
# https://msdn.microsoft.com/en-us/library/ms686694(v=vs.85).aspx
# LOGICAL_PROCESSOR_RELATIONSHIP enum (0 - 4, 0xffff)
# https://msdn.microsoft.com/2ada52f0-70ec-4146-9ef7-9af3b08996f9
# CACHE_DESCRIPTOR struct
# https://msdn.microsoft.com/38cfa605-831c-45ef-a99f-55f42b2b56e9
# PROCESSOR_CACHE_TYPE
# https://msdn.microsoft.com/23044f67-e944-43c2-8c75-3d2fba87cb3c
# C example
# https://msdn.microsoft.com/en-us/904d2d35-f419-4e8f-a689-f39ed926644c
| mit | -8,452,032,269,745,349,000 | 25.373016 | 75 | 0.578694 | false | 3.671823 | true | false | false |
sonicyang/chiphub | digikey/migrations/0007_auto_20151121_0832.py | 1 | 1294 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('digikey', '0006_auto_20151028_0846'),
]
operations = [
migrations.CreateModel(
name='Order_Details',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('quantity', models.IntegerField()),
],
),
migrations.RemoveField(
model_name='components',
name='order_id',
),
migrations.RemoveField(
model_name='components',
name='quantity',
),
migrations.AddField(
model_name='order_details',
name='component',
field=models.ForeignKey(to='digikey.Components'),
),
migrations.AddField(
model_name='order_details',
name='order',
field=models.ForeignKey(to='digikey.Orders'),
),
migrations.AddField(
model_name='components',
name='associated_order',
field=models.ManyToManyField(to='digikey.Orders', through='digikey.Order_Details'),
),
]
| mit | 6,385,116,923,972,012,000 | 28.409091 | 114 | 0.541731 | false | 4.524476 | false | false | false |
CERNDocumentServer/invenio | modules/miscutil/lib/pid_store.py | 1 | 14826 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2015 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""PersistentIdentifier store and registration.
Usage example for registering new identifiers::
from flask import url_for
from invenio.pid_store import PersistentIdentifier
# Reserve a new DOI internally first
pid = PersistentIdentifier.create('doi','10.0572/1234')
# Get an already reserved DOI
pid = PersistentIdentifier.get('doi', '10.0572/1234')
# Assign it to a record.
pid.assign('rec', 1234)
url = url_for("record.metadata", recid=1234, _external=True)
doc = "<resource ...."
# Pre-reserve the DOI in DataCite
pid.reserve(doc=doc)
# Register the DOI (note parameters depended on the provider and pid type)
pid.register(url=url, doc=doc)
# Reassign DOI to new record
pid.assign('rec', 5678, overwrite=True),
# Update provider with new information
pid.update(url=url, doc=doc)
# Delete the DOI (you shouldn't be doing this ;-)
pid.delete()
"""
import os
from datetime import datetime
from invenio.config import CFG_PYLIBDIR
from invenio.dbquery import run_sql
from invenio.pluginutils import PluginContainer
from invenio.pid_provider import PidProvider
PIDSTORE_OBJECT_TYPES = ['rec', ]
"""
Definition of supported object types
"""
#
# Internal configuration values. Normally you will not need to edit
# any of the configuration values below.
#
PIDSTORE_STATUS_NEW = 'N'
"""
The pid has *not* yet been registered with the service provider.
"""
PIDSTORE_STATUS_REGISTERED = 'R'
"""
The pid has been registered with the service provider.
"""
PIDSTORE_STATUS_DELETED = 'D'
"""
The pid has been deleted/inactivated with the service provider. This should
very rarely happen, and must be kept track of, as the PID should not be
reused for something else.
"""
PIDSTORE_STATUS_RESERVED = 'K'
"""
The pid has been reserved in the service provider but not yet fully
registered.
"""
def plugin_builder(plugin_name, plugin_code):
if 'provider' in dir(plugin_code):
candidate = getattr(plugin_code, 'provider')
try:
if issubclass(candidate, PidProvider):
return candidate
except:
pass
raise ValueError('%s is not a valid PID provider' % plugin_name)
_PID_PROVIDERS = PluginContainer(
os.path.join(CFG_PYLIBDIR, 'invenio', 'pid_providers', '*.py'),
plugin_builder=plugin_builder)
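# A provider plugin is simply a module in invenio/pid_providers/ that exposes a
# ``provider`` attribute pointing to a PidProvider subclass. Minimal sketch
# (illustrative only -- the method names below are taken from how providers are
# used further down in this module; the real base-class API may require more):
#
#     class LocalProvider(PidProvider):
#         pid_type = 'local'
#
#         def create_new_pid(self, pid_value):
#             return pid_value
#
#         def reserve(self, pid, *args, **kwargs):
#             return True
#
#     provider = LocalProvider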
class PersistentIdentifier(object):
"""
Store and register persistent identifiers
Assumptions:
* Persistent identifiers can be represented as a string of max 255 chars.
* An object has many persistent identifiers.
* A persistent identifier has one and only one object.
"""
def __init__(self, id=None, pid_type=None, pid_value=None,
pid_provider=None, status=None, object_type=None,
object_value=None, created=None, last_modified=None):
"""
:param id: Id of persistent identifier entry
:param pid_type: Persistent Identifier Schema
:param pid_str: Persistent Identifier
:param pid_provider: Persistent Identifier Provider
:param status: Status of persistent identifier, e.g. registered,
reserved, deleted
:param object_type: Object Type - e.g. rec for record
:param object_value: Object ID - e.g. a record id
:param created: Creation datetime of entry
:param last_modified: Last modification datetime of entry
"""
self.id = id
self.pid_type = pid_type
self.pid_value = pid_value
self.pid_provider = pid_provider
self.status = status
self.object_type = object_type
self.object_value = object_value
self.created = created or datetime.now()
self.last_modified = last_modified or datetime.now()
def __repr__(self):
return self.__dict__.__repr__()
#
# Class methods
#
@classmethod
def create(cls, pid_type, pid_value, pid_provider='', provider=None):
"""
Internally reserve a new persistent identifier in Invenio.
        A provider for the given persistent identifier type must exist. By
default the system will choose a provider according to the pid
type. If desired, the default system provider can be overridden via
the provider keyword argument.
Returns PID object if successful otherwise None.
"""
# Ensure provider exists
if provider is None:
provider = PidProvider.create(pid_type, pid_value, pid_provider)
if not provider:
raise Exception(
"No provider found for %s:%s (%s)" % (
pid_type, pid_value, pid_provider)
)
try:
obj = cls(pid_type=provider.pid_type,
pid_value=provider.create_new_pid(pid_value),
pid_provider=pid_provider,
status=PIDSTORE_STATUS_NEW)
obj._provider = provider
run_sql(
'INSERT INTO pidSTORE '
'(pid_type, pid_value, pid_provider, status,'
' created, last_modified) '
'VALUES (%s, %s, %s, %s, NOW(), NOW())',
(obj.pid_type, obj.pid_value, obj.pid_provider, obj.status)
)
obj.log("CREATE", "Created")
return obj
except Exception, e:
obj.log("CREATE", e.message)
raise e
@classmethod
def get(cls, pid_type, pid_value, pid_provider='', provider=None):
"""
Get persistent identifier.
Returns None if not found.
"""
res = run_sql(
'SELECT id, pid_type, pid_value, pid_provider, status, '
'object_type, object_value, created, last_modified '
'FROM pidSTORE '
'WHERE pid_type=%s and pid_value=%s and pid_provider=%s',
(pid_type, pid_value, pid_provider)
)
try:
obj = cls(*res[0])
obj._provider = provider
return obj
except IndexError:
return None
@classmethod
def exists(cls, pid_type, pid_value):
"""Check existence of a PID."""
res = run_sql(
'SELECT id from pidSTORE where pid_type=%s and pid_value=%s',
(pid_type, pid_value))
return True if res else False
#
# Instance methods
#
def has_object(self, object_type, object_value):
"""
Determine if this persistent identifier is assigned to a specific
object.
"""
if object_type not in PIDSTORE_OBJECT_TYPES:
raise Exception("Invalid object type %s." % object_type)
return self.object_type == object_type and \
self.object_value == object_value
def get_provider(self):
"""
Get the provider for this type of persistent identifier
"""
if self._provider is None:
self._provider = PidProvider.create(
self.pid_type, self.pid_value, self.pid_provider
)
return self._provider
def assign(self, object_type, object_value, overwrite=False):
"""
Assign this persistent identifier to a given object
Note, the persistent identifier must first have been reserved. Also,
        if an existing object is already assigned to the pid, it will raise an
exception unless overwrite=True.
"""
if object_type not in PIDSTORE_OBJECT_TYPES:
raise Exception("Invalid object type %s." % object_type)
if not self.id:
raise Exception(
"You must first create the persistent identifier before you "
"can assign objects to it."
)
if self.is_deleted():
raise Exception(
"You cannot assign objects to a deleted persistent identifier."
)
# Check for an existing object assigned to this pid
existing_obj_id = self.get_assigned_object(object_type)
if existing_obj_id and existing_obj_id != object_value:
if not overwrite:
raise Exception(
"Persistent identifier is already assigned to another "
"object"
)
else:
self.log(
"ASSIGN",
"Unassigned object %s:%s (overwrite requested)" % (
self.object_type, self.object_value)
)
self.object_type = None
self.object_value = None
elif existing_obj_id and existing_obj_id == object_value:
# The object is already assigned to this pid.
return True
self.object_type = object_type
self.object_value = object_value
self._update()
self.log("ASSIGN", "Assigned object %s:%s" % (self.object_type,
self.object_value))
return True
def update(self, with_deleted=False, *args, **kwargs):
""" Update the persistent identifier with the provider. """
if self.is_new() or self.is_reserved():
raise Exception(
"Persistent identifier has not yet been registered."
)
if not with_deleted and self.is_deleted():
raise Exception("Persistent identifier has been deleted.")
provider = self.get_provider()
if provider is None:
self.log("UPDATE", "No provider found.")
raise Exception("No provider found.")
if provider.update(self, *args, **kwargs):
if with_deleted and self.is_deleted():
self.status = PIDSTORE_STATUS_REGISTERED
self._update()
return True
return False
def reserve(self, *args, **kwargs):
"""
Reserve the persistent identifier with the provider
Note, the reserve method may be called multiple times, even if it was
already reserved.
"""
if not (self.is_new() or self.is_reserved()):
raise Exception(
"Persistent identifier has already been registered."
)
provider = self.get_provider()
if provider is None:
self.log("RESERVE", "No provider found.")
raise Exception("No provider found.")
if provider.reserve(self, *args, **kwargs):
self.status = PIDSTORE_STATUS_RESERVED
self._update()
return True
return False
def register(self, *args, **kwargs):
"""
Register the persistent identifier with the provider
"""
if self.is_registered() or self.is_deleted():
raise Exception(
"Persistent identifier has already been registered."
)
provider = self.get_provider()
if provider is None:
self.log("REGISTER", "No provider found.")
raise Exception("No provider found.")
if provider.register(self, *args, **kwargs):
self.status = PIDSTORE_STATUS_REGISTERED
self._update()
return True
return False
def delete(self, *args, **kwargs):
"""
Delete the persistent identifier
"""
if self.is_new():
            # New persistent identifier which hasn't been registered yet. Just
            # delete it completely (but keep the log).
# Remove links to log entries (but otherwise leave the log entries)
run_sql('UPDATE pidLOG '
'SET id_pid=NULL WHERE id_pid=%s', (self.id, ))
run_sql("DELETE FROM pidSTORE WHERE id=%s", (self.id, ))
self.log("DELETE", "Unregistered PID successfully deleted")
else:
provider = self.get_provider()
if not provider.delete(self, *args, **kwargs):
return False
self.status = PIDSTORE_STATUS_DELETED
self._update()
return True
def sync_status(self, *args, **kwargs):
"""Synchronize persistent identifier status.
Used when the provider uses an external service, which might have been
modified outside of our system.
"""
provider = self.get_provider()
result = provider.sync_status(self, *args, **kwargs)
self._update()
return result
def get_assigned_object(self, object_type=None):
if object_type is not None and self.object_type == object_type:
return self.object_value
return None
def is_registered(self):
"""Returns true if the persistent identifier has been registered """
return self.status == PIDSTORE_STATUS_REGISTERED
def is_deleted(self):
"""Returns true if the persistent identifier has been deleted """
return self.status == PIDSTORE_STATUS_DELETED
def is_new(self):
"""
Returns true if the persistent identifier has not yet been
registered or reserved
"""
return self.status == PIDSTORE_STATUS_NEW
def is_reserved(self):
"""
Returns true if the persistent identifier has not yet been
reserved.
"""
return self.status == PIDSTORE_STATUS_RESERVED
def log(self, action, message):
if self.pid_type and self.pid_value:
message = "[%s:%s] %s" % (self.pid_type, self.pid_value, message)
run_sql('INSERT INTO pidLOG (id_pid, timestamp, action, message)'
'VALUES(%s, NOW(), %s, %s)', (self.id, action, message))
def _update(self):
"""Update the pidSTORE (self) object status on the DB."""
run_sql(
'UPDATE pidSTORE '
'SET status=%s, object_type=%s, object_value=%s, '
'last_modified=NOW() WHERE pid_type=%s and pid_value=%s',
(self.status, self.object_type, self.object_value,
self.pid_type, self.pid_value)
)
| gpl-2.0 | 7,298,396,814,584,288,000 | 33.319444 | 79 | 0.595103 | false | 4.403326 | false | false | false |
XeryusTC/lanciesite | pointofsale/views.py | 1 | 6459 | import datetime
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse, reverse_lazy
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.utils.decorators import method_decorator
from django.views.generic.base import TemplateView, RedirectView
from django.views.generic.edit import FormView
from pointofsale.models import Drink, Account, DrinkOrder
from pubsite.models import Participant, get_current_event, Event
class SaleView(TemplateView):
template_name = "pointofsale/sale.html"
success_url = reverse_lazy("pos:sale")
insufficient = False
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(SaleView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(SaleView, self).get_context_data(**kwargs)
# display an error if there was not enough money in the account to buy a drink
context['insufficient'] = self.insufficient
# get the current event or don't do anything if there is none
try:
event = get_current_event()
except:
return context
context['drinks'] = Drink.objects.all()
context['accounts'] = Account.objects.filter(participant__event=event)
# get the last few drinks that have been bought during the event
context['log'] = DrinkOrder.objects.filter(account__participant__event=event).order_by('-time')[:10]
return context
class ParticipantOverview(TemplateView):
template_name = "pointofsale/participants.html"
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ParticipantOverview, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ParticipantOverview, self).get_context_data(**kwargs)
# get the latest event and its participants
try:
participant_list = Participant.objects.filter(event=get_current_event())
# sort the participants according to whether they have an account or not
context['not_finished'] = []
context['finished'] = []
for p in participant_list:
try:
p.account
except Account.DoesNotExist:
# participant doesn't have an account
context['not_finished'].append(p)
else:
# participant does have an account
context['finished'].append(p)
except Event.DoesNotExist:
pass # return empty context
return context
class BuyDrinkRedirectView(RedirectView):
pattern_name = "pos:sale"
permanent = False
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(BuyDrinkRedirectView, self).dispatch(*args, **kwargs)
def get_redirect_url(self, participant, drink, quantity, *args, **kwargs):
try:
buy_drink(participant, drink, quantity)
except InsufficientFundsException:
self.pattern_name = "pos:sale_insufficient"
except Account.DoesNotExist:
# someone tried to buy something for an account which does not exist
# let it slide for now, but TODO: handle this gracefully
pass
return super(BuyDrinkRedirectView, self).get_redirect_url(*args, **kwargs)
class AddCreditsRedirectView(RedirectView):
pattern_name = "pos:participants"
permanent = False
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(AddCreditsRedirectView, self).dispatch(*args, **kwargs)
def get_redirect_url(self, participant, *args, **kwargs):
p = Participant.objects.get(pk=participant)
try:
p.account.credits += 5000
p.account.save()
except:
a = Account(participant=p, credits=5000)
a.save()
return super(AddCreditsRedirectView, self).get_redirect_url(*args, **kwargs)
class GenerateCSVView(TemplateView):
template_name = "pointofsale/csv.html"
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(GenerateCSVView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(GenerateCSVView, self).get_context_data(**kwargs)
context['csv'] = "id,amount,name,address,place,IBAN,email,date\n"
try:
e = get_current_event()
participants = Participant.objects.filter(event=e).order_by('account__debit_id')
for p in participants:
try:
id = p.account.debit_id
context['csv'] += """{id},{amount},"{name}","{address}","{place}","{iban}","{email}",{date}\n""".format(
id=id*2-1, amount=p.price, name=p.user.get_full_name(),
address=p.address + " " + p.postal_code, place=p.city,
iban=p.iban, email=p.user.email, date=e.start_date)
context['csv'] += """{id},{amount},"{name}","{address}","{place}","{iban}","{email}",{date}\n""".format(
id=id*2, amount=p.account.get_credits_used()/100.0, name=p.user.get_full_name(),
address=p.address + " " + p.postal_code, place=p.city,
iban=p.iban, email=p.user.email, date=e.end_date)
except:
# Nothing to do here, the participant doesn't have any costs so it shouldn't be reported in the csv
pass
except Event.DoesNotExist:
return context # There are no events so there is no CSV to be generated
return context
def render_to_response(self, context, **kwargs):
return super(TemplateView, self).render_to_response(context, content_type="text/plain", **kwargs) # override the MIME type
class InsufficientFundsException(Exception):
pass
def buy_drink(participant, drink, quantity):
p = Participant.objects.get(pk=participant)
d = Drink.objects.get(pk=drink)
quantity = int(quantity)
if p.account.get_credits_remaining() < d.price * quantity:
raise InsufficientFundsException()
for i in range(quantity):
do = DrinkOrder.objects.create(account=p.account, drink=d)
do.save()
| gpl-2.0 | 1,818,184,969,982,068,200 | 38.145455 | 130 | 0.629354 | false | 4.009311 | false | false | false |
geocryology/GeoCryoLabPy | equipment/thermistorCalibrate.py | 1 | 7565 | import datetime
import math
import sys
import time
from Keysight34972A import Keysight34972A
from Fluke7341 import Fluke7341
from Fluke1502A import Fluke1502A
NUM_PROBES = 2
PROBE_LIST = [1, 2]
SAMPLE_INTERVAL = 5
BUFFER_SIZE = 5000
STD_HOLD_COUNT = 5000
TEMP_INCREMENT = 0.0
""" QUICK VALUES FOR TESTING
SAMPLE_INTERVAL = 2
BUFFER_SIZE = 5
STD_HOLD_COUNT = 5
"""
class RingBuffer():
def __init__(self, size):
self.size = size
self.buffer = [0] * size
self.pointer = 0
self.count = 0
"""
for i in range(size):
if i % 2:
self.buffer[i] = -1e16
else:
self.buffer[i] = 1e16
"""
def update(self, value):
self.buffer[self.pointer] = value
self.pointer = (self.pointer + 1) % self.size
self.count += 1
def reset(self):
self.count = 0
def getAverage(self, silent=True):
if self.count < self.size:
if not silent:
print "[WARNING] Buffer has not been filled completely: [{}/{}]".format(self.count, self.size)
return sum(self.buffer) / self.size
def getSTD(self):
std = 0
avg = self.getAverage()
for i in range(self.size):
std += (self.buffer[i] - avg) ** 2
std /= self.size
return math.sqrt(std)
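# Usage sketch (hypothetical readings, not part of the original script): the
# ring buffer keeps the last <size> resistance readings so thermalRamp() can
# watch the running average and standard deviation for stability.
#
#     buf = RingBuffer(3)
#     for r in (100.1, 100.2, 100.1):
#         buf.update(r)
#     buf.getAverage()   # ~100.13
#     buf.getSTD()       # small value -> the probe reading is stable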
def thermalRamp(start, end, increment, daq, bath, thermalProbe):
setpoint = start
bath.setSetpoint(setpoint)
TEMP_INCREMENT = increment
timestamp = datetime.datetime.now().isoformat().split('.')[0].replace(':', '-')
csvFile = "calibration{}.csv".format(timestamp)
probeTitles = ",".join(["probe {}".format(i) for i in PROBE_LIST])
averageTitles = ",".join(["average {}".format(i) for i in PROBE_LIST])
f = open(csvFile, "w")
f.write("time, elapsed time, setpoint, bath temp, probe temp,{},{}\n".format(probeTitles, averageTitles))
f.close()
# create ring buffer for each thermistor
buffers = [RingBuffer(BUFFER_SIZE) for i in range(NUM_PROBES)]
minSTDs = [1e9 for i in range(NUM_PROBES)]
maxSTDs = [[0] * STD_HOLD_COUNT for i in range(NUM_PROBES)]
counts = [0 for i in range(NUM_PROBES)]
done = False
numMeasurements = 0
t0 = datetime.datetime.now()
equilibriumTime = time.time()
while not done:
try:
t1 = time.time()
bathTemp = float(bath.readTemp())
probeTemp = float(thermalProbe.readTemp())
resistances = daq.readValues()
numMeasurements += 1
t = datetime.datetime.now()
timestamp = "{}/{}/{} {}:{}:{}".format(t.month, t.day, t.year, t.hour, t.minute, t.second)
# calculate STD for all probes, update count if minimum doesn't change
for i in range(NUM_PROBES):
buffers[i].update(resistances[i])
std = buffers[i].getSTD()
if std < minSTDs[i]:
print "new lowest std"
minSTDs[i] = std
counts[i] = 0
elif int(std) > max(maxSTDs[i]):
print "std too high"
counts[i] = 0
else:
if numMeasurements > BUFFER_SIZE:
print "stabilizing"
counts[i] += 1
else:
print "need more measurements"
maxSTDs[i] = maxSTDs[i][1:] + [int(std)]
if abs(bathTemp - setpoint) > 0.01:
print "bathTemp ({}) != setpoint ({})".format(bathTemp, setpoint)
bath.setSetpoint(setpoint)
counts = [0 for count in counts]
# check if any probes are not at equilibrium
allEqualized = True
for i in range(NUM_PROBES):
if counts[i] < STD_HOLD_COUNT:
allEqualized = False
break
r = ",".join([str(i) for i in resistances])
a = ",".join([str(buffer.getAverage()) for buffer in buffers])
t = datetime.datetime.now() - t0
seconds = t.seconds % 60
minutes = (t.seconds / 60) % 60
hours = (t.seconds / 3600) % 24
elapsedTime = "{}:{}:{}".format(hours, minutes, seconds)
f = open(csvFile, "a")
f.write("{},{},{},{},{},{},{}".format(timestamp, elapsedTime, setpoint, bathTemp, probeTemp, r, a))
# go to next setpoint
if allEqualized and numMeasurements > BUFFER_SIZE:
print "equalized"
f.write(",{}".format(",".join([str(buffer.getAverage()) for buffer in buffers])))
if abs(setpoint - end) < 0.001:
done = True
else:
setpoint += TEMP_INCREMENT
bath.setSetpoint(setpoint)
for i in range(NUM_PROBES):
buffers[i].reset()
counts[i] = 0
numMeasurements = 0
equilibriumTime = time.time() - equilibriumTime
f.write(",{}".format(equilibriumTime))
equilibriumTime = time.time()
f.write("\n")
f.close()
print counts
sampleTime = time.time() - t1
if sampleTime < SAMPLE_INTERVAL:
print "Elapsed: {}, Sleeping: {}".format(sampleTime, SAMPLE_INTERVAL - sampleTime)
time.sleep(SAMPLE_INTERVAL - sampleTime)
except KeyboardInterrupt:
done = True
if __name__ == "__main__":
# Connect to and initialize DAQ
daq = Keysight34972A()
if not daq.connect():
print "Failed to connect to Keysight34972A".format()
exit(1)
daq.initialize(Keysight34972A.MODE_RESISTANCE, PROBE_LIST)
# Connect to and initialize bath
bath = Fluke7341()
if not bath.connect("COM5"):
print "Failed to connect to Fluke7341"
exit(1)
thermalProbe = Fluke1502A()
if not thermalProbe.connect("COM7"):
print "Failed to connect to Fluke1502A"
exit(1)
changeSetpoint = False
setpoint = 21.0
if len(sys.argv) > 1:
try:
setpoint = float(sys.argv[1])
changeSetpoint = True
except ValueError:
print "parameter must be a float"
bath.disconnect()
daq.disconnect()
exit()
if changeSetpoint:
bath.setSetpoint(setpoint)
# thermalRamp(1, -10, -0.1, daq, bath, thermalProbe)
# thermalRamp(-10, 1, 0.1, daq, bath, thermalProbe)
# thermalRamp(0, -4, -0.02, daq, bath, thermalProbe)
# thermalRamp(-4, -6, -0.1, daq, bath, thermalProbe)
# thermalRamp(-6, -10, -1.0, daq, bath, thermalProbe)
# thermalRamp(-10, -6, 1.0, daq, bath, thermalProbe)
# thermalRamp(-6, -4, 0.1, daq, bath, thermalProbe)
# thermalRamp(-4, 0, 0.02, daq, bath, thermalProbe)
thermalRamp(0.0, 1.0, 0.0, daq, bath, thermalProbe)
bath.disconnect()
daq.disconnect()
thermalProbe.disconnect()
| gpl-3.0 | 186,292,659,177,525,600 | 31.891304 | 111 | 0.506411 | false | 3.732116 | false | false | false |
LosFuzzys/CTFd | tests/teams/test_auth.py | 2 | 3278 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from CTFd.models import Users, db
from tests.helpers import (
create_ctfd,
destroy_ctfd,
gen_team,
gen_user,
login_as_user,
register_user,
)
def test_banned_team():
app = create_ctfd(user_mode="teams")
with app.app_context():
register_user(app)
team = gen_team(app.db, banned=True)
user = Users.query.filter_by(id=2).first()
user.team_id = team.id
db.session.commit()
client = login_as_user(app)
routes = ["/", "/challenges", "/api/v1/challenges"]
for route in routes:
r = client.get(route)
assert r.status_code == 403
destroy_ctfd(app)
def test_teams_join_get():
"""Can a user get /teams/join"""
app = create_ctfd(user_mode="teams")
with app.app_context():
register_user(app)
with login_as_user(app) as client:
r = client.get("/teams/join")
assert r.status_code == 200
destroy_ctfd(app)
def test_teams_join_post():
"""Can a user post /teams/join"""
app = create_ctfd(user_mode="teams")
with app.app_context():
gen_user(app.db, name="user")
gen_team(app.db, name="team")
with login_as_user(app) as client:
r = client.get("/teams/join")
assert r.status_code == 200
with client.session_transaction() as sess:
data = {
"name": "team",
"password": "password",
"nonce": sess.get("nonce"),
}
r = client.post("/teams/join", data=data)
assert r.status_code == 302
incorrect_data = data
incorrect_data["password"] = ""
r = client.post("/teams/join", data=incorrect_data)
assert r.status_code == 200
destroy_ctfd(app)
def test_team_login():
"""Can a user login as a team"""
app = create_ctfd(user_mode="teams")
with app.app_context():
user = gen_user(app.db, name="user")
team = gen_team(app.db)
user.team_id = team.id
team.members.append(user)
app.db.session.commit()
with login_as_user(app) as client:
r = client.get("/team")
assert r.status_code == 200
destroy_ctfd(app)
def test_team_join_ratelimited():
"""Test that team joins are ratelimited"""
app = create_ctfd(user_mode="teams")
with app.app_context():
gen_user(app.db, name="user")
gen_team(app.db, name="team")
with login_as_user(app) as client:
r = client.get("/teams/join")
assert r.status_code == 200
with client.session_transaction() as sess:
data = {
"name": "team",
"password": "wrong_password",
"nonce": sess.get("nonce"),
}
for _ in range(10):
r = client.post("/teams/join", data=data)
data["password"] = "password"
for _ in range(10):
r = client.post("/teams/join", data=data)
assert r.status_code == 429
assert Users.query.filter_by(id=2).first().team_id is None
destroy_ctfd(app)
| apache-2.0 | -1,840,673,921,199,951,600 | 29.924528 | 74 | 0.526541 | false | 3.626106 | true | false | false |
prisae/blog-notebooks | travelmaps.py | 1 | 7492 |
# coding: utf-8
# # `travelmaps`: Functions and settings to create beautiful global and local travel maps
#
# [Blog](http://werthmuller.org/blog)
# [Repo](http://github.com/prisae/blog-notebooks)
#
# See the blog post [Travel Maps](http://werthmuller.org/blog/2015/travelmap) for more explanations and some examples.
#
# - country : Plot/fill countries.
# - city : Plot and annotate cities.
# - arrow : Plot arrows from city to city.
#
# These functions are very basic, and include almost no checking or similar at all. Feel free to fork and improve them!
# In[2]:
import shapefile
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rcParams, patheffects
from matplotlib.collections import LineCollection
# In[3]:
# Customized plt.xkcd()-settings
# http://jakevdp.github.io/blog/2013/07/10/XKCD-plots-in-matplotlib
rcParams['font.family'] = ['Humor Sans', 'Comic Sans MS']
rcParams['font.size'] = 8.0
rcParams['path.sketch'] = (1, 100, 2)
rcParams['path.effects'] = [patheffects.withStroke(linewidth=2, foreground="w")]
rcParams['axes.linewidth'] = 1.0
rcParams['lines.linewidth'] = 1.0
rcParams['figure.facecolor'] = 'white'
rcParams['grid.linewidth'] = 0.0
rcParams['axes.unicode_minus'] = False
# *Bayesian Methods for Hackers*-colour-cylce
# (https://github.com/pkgpl/PythonProcessing/blob/master/results/matplotlibrc.bmh.txt)
rcParams['axes.color_cycle'] = ['#348ABD', '#A60628', '#7A68A6', '#467821', '#D55E00',
'#CC79A7', '#56B4E9', '#009E73', '#F0E442', '#0072B2']
# Adjust dpi, so figure on screen and savefig looks the same
rcParams['figure.dpi'] = 300
rcParams['savefig.dpi'] = 300
# In[4]:
def country(countries, bmap, fc=None, ec='none', lw=1, alpha=1, adm=0, gadmpath='data/TravelMap/'):
"""Colour <countries> with a <bmap> projection.
This script is adapted from:
http://www.geophysique.be/2013/02/12/
matplotlib-basemap-tutorial-10-shapefiles-unleached-continued
I downloaded the countries shapefile from the *Global Administrative Areas*
website, [gadm.org](http://gadm.org).
=> You have to use the same abbreviations for the countries as GADM does, or adjust the script.
=> You have to download the shapefiles from GADM, and extract them into the <gadmpath> directory.
    Of course, you can use any other shapefiles you have, and adjust the script accordingly.
Parameters
----------
countries : string or list of strings
Countries to be plotted.
bmap : handle
As you get from bmap = Basemap().
fc : None or colour, or list of colours; <None>
Face-colour for country; if <None>, it will cycle through colour-cycle.
ec : 'none' or colour (scalar or list); <'none'>
Edge-colour for country.
lw : scalar or list; <1>
Linewidth for country.
alpha: scalar or list; <1>
Transparency.
adm : {0, 1, 2, 3}; <0>
Administrative area to choose.
gadmpath : 'string'
        Absolute or relative path to shapefiles.
"""
# Ensure countries is a list
if not isinstance(countries, list):
countries = [countries,]
# Get current axis
cax = plt.gca()
# Loop through the countries
for country in countries:
# Get shapefile for the country; extract shapes and records
r = shapefile.Reader(gadmpath+country+'_adm/'+country+'_adm'+str(adm))
shapes = r.shapes()
records = r.records()
# Loop through the records; for adm0 this is only 1 run
n = 0
for record, shape in zip(records,shapes):
lons,lats = zip(*shape.points)
data = np.array(bmap(lons, lats)).T
if len(shape.parts) == 1:
segs = [data,]
else:
segs = []
for i in range(1,len(shape.parts)):
index = shape.parts[i-1]
index2 = shape.parts[i]
segs.append(data[index:index2])
segs.append(data[index2:])
lines = LineCollection(segs,antialiaseds=(1,))
# If facecolor is provided, use; else cycle through colours
if fc:
if not isinstance(fc, list):
lines.set_facecolors(fc)
else:
lines.set_facecolors(fc[n])
else:
lines.set_facecolors(next(cax._get_lines.color_cycle))
# Edge colour
if not isinstance(ec, list):
lines.set_edgecolors(ec)
else:
lines.set_edgecolors(ec[n])
# Alpha
if not isinstance(alpha, list):
lines.set_alpha(alpha)
else:
lines.set_alpha(alpha[n])
# Line width
if not isinstance(lw, list):
lines.set_linewidth(lw)
else:
lines.set_linewidth(lw[n])
# Add to current plot
cax.add_collection(lines)
n += 1
# In[5]:
def city(city, name, bmap, mfc=None, color='b', offs=[.1, .1], halign='left'):
"""Plot a circle at <city> and annotate with <name>, with a <bmap> projection.
Parameters
----------
city : List of two scalars
[Northing, Easting].
name : string
name to be plotted with city.
bmap : handle
As you get from bmap = Basemap().
mfc : None or colour; <None>
Marker face-colour for city; if <None>, it will cycle through colour-cycle.
    color : 'none' or colour; <'b'>
Colour for <name>.
offs : List of two scalars; <[.1, .1]>
Offset for <name> from <city>.
halign : {'left', 'right', 'center'}; <'left'>
Alignment of <name> relative to <city>.
"""
# Get current axis
cax = plt.gca()
# Plot dot
# If mfc is provided, use; else cycle through colours
if not mfc:
mfc = next(cax._get_patches_for_fill.color_cycle)
bmap.plot(city[1], city[0], 'o', mfc=mfc, ms=4, mew=1, latlon=True)
# Annotate name
cax.annotate(name, bmap(city[1]+offs[0], city[0]+offs[1]),
horizontalalignment=halign, color=color, fontsize=7, zorder=10)
# In[6]:
def arrow(start, end, bmap, ec="k", fc="w", rad=-.3):
"""Plot an arrow from <start> to <end>, with a <bmap> projection.
Parameters
----------
start : List of two scalars
Start of arrow [Northing, Easting].
end : List of two scalars
End of arrow [Northing, Easting].
bmap : handle
As you get from bmap = Basemap().
ec : 'none' or colour; <'k'>
Edge-colour for arrow.
fc : 'none' or colour; <w>
Face-colour for arrow.
    rad : Scalar; <-.3>
Curvature of arrow.
"""
# Get current axis
cax = plt.gca()
# Plot arrow
arrowstyle='Fancy, head_length=.6, head_width=.6, tail_width=.4'
cax.annotate('', bmap(end[1], end[0]), bmap(start[1], start[0]),
arrowprops=dict(arrowstyle=arrowstyle,
alpha=.6,
patchA=None,
patchB=None,
shrinkA=3,
shrinkB=3,
fc=fc, ec=ec,
connectionstyle="arc3, rad="+str(rad),
))
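

# In[7]:


# Minimal usage sketch (added here for illustration, not part of the original
# notebook). It assumes Basemap is installed and that the GADM shapefile for
# Switzerland ('CHE') has been extracted into data/TravelMap/ as described in
# the country() docstring; city coordinates are approximate.

from mpl_toolkits.basemap import Basemap

fig = plt.figure(figsize=(5, 4))
bmap = Basemap(projection='merc', resolution='l',
               llcrnrlat=45.5, urcrnrlat=48.0, llcrnrlon=5.5, urcrnrlon=11.0)
country('CHE', bmap, fc='#348ABD', ec='k', lw=.5)
city([46.95, 7.45], 'Bern', bmap)
city([47.37, 8.54], 'Zurich', bmap)
arrow([46.95, 7.45], [47.37, 8.54], bmap)
plt.show()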
| cc0-1.0 | 6,862,216,792,494,137,000 | 32.747748 | 119 | 0.569407 | false | 3.589842 | false | false | false |
OpenMined/PySyft | packages/syft/examples/hyperledger-aries/src/AriesDuetTokenExchanger.py | 1 | 11076 | # stdlib
import asyncio
import json
import time
from typing import Dict as TypeDict
from typing import Optional
# third party
from aries_cloudcontroller import AriesAgentController
# syft absolute
from syft.grid.duet.exchange_ids import DuetCredentialExchanger
class AriesDuetTokenExchanger(DuetCredentialExchanger):
def __init__(self, agent_controller: AriesAgentController) -> None:
super().__init__()
self.agent_controller: AriesAgentController = agent_controller
self.responder_id: Optional[asyncio.Future] = None
self.proof_request: Optional[TypeDict] = None
self.is_verified: Optional[asyncio.Future] = None
self.duet_didcomm_connection_id: Optional[str] = None
self._register_agent_listeners()
# The DuetCredentialExchanger expects this method to be implemented.
# In this case we are establishing a DIDComm connection, challenging the connection
    # with an optional authentication policy and, once the connection is verified,
    # sending the duet token identifier over this channel.
def run(
self,
credential: str,
) -> str:
self.responder_id = asyncio.Future()
self.duet_token = credential
if self.join:
self._accept_duet_didcomm_invite()
else:
self._create_duet_didcomm_invitation()
loop = asyncio.get_event_loop()
if self.duet_didcomm_connection_id is not None:
self.await_active(self.duet_didcomm_connection_id)
else:
print("duet_didcomm_connection_id not set")
print("Sending Duet Token", self.duet_didcomm_connection_id, credential)
if self.is_verified:
if self.is_verified.result() is True:
print("Connection is Verified")
loop.run_until_complete(
self.agent_controller.messaging.send_message(
self.duet_didcomm_connection_id, credential
)
)
else:
print("Proof request not verified")
else:
print("No Proof Requested")
loop.run_until_complete(
self.agent_controller.messaging.send_message(
self.duet_didcomm_connection_id, credential
)
)
loop.run_until_complete(self.responder_id)
token = self.responder_id.result()
print("TOKEN ", token)
return token
def _accept_duet_didcomm_invite(self) -> None:
while True:
invite = input("♫♫♫ > Duet Partner's Aries Invitation: ") # nosec
loop = asyncio.get_event_loop()
# is_ready = False
try:
response = loop.run_until_complete(
self.agent_controller.connections.receive_invitation(invite)
)
print(response["connection_id"])
connection_id = response["connection_id"]
except Exception:
print(" > Error: Invalid invitation. Please try again.")
break
self.duet_didcomm_connection_id = connection_id
def _create_duet_didcomm_invitation(self) -> None:
loop = asyncio.get_event_loop()
response = loop.run_until_complete(
self.agent_controller.connections.create_invitation()
)
connection_id = response["connection_id"]
invite_message = json.dumps(response["invitation"])
print()
print("♫♫♫ > " + "STEP 1:" + " Send the aries invitation to your Duet Partner!")
print()
print(invite_message)
print()
self.duet_didcomm_connection_id = connection_id
# Should be converted to asycio Future
def await_active(self, connection_id: str) -> None:
print("Waiting for active connection", connection_id)
while True:
loop = asyncio.get_event_loop()
response = loop.run_until_complete(
self.agent_controller.connections.get_connection(connection_id)
)
is_ready = "active" == response["state"]
if is_ready:
print("Connection Active")
if self.proof_request:
self.is_verified = asyncio.Future()
self.challenge_connection(connection_id)
loop.run_until_complete(self.is_verified)
break
else:
time.sleep(2)
def challenge_connection(self, connection_id: str) -> None:
loop = asyncio.get_event_loop()
proof_request_web_request = {
"connection_id": connection_id,
"proof_request": self.proof_request,
"trace": False,
}
response = loop.run_until_complete(
self.agent_controller.proofs.send_request(proof_request_web_request)
)
print("Challenge")
print(response)
pres_ex_id = response["presentation_exchange_id"]
print(pres_ex_id)
def _register_agent_listeners(self) -> None:
print("REGISTER LISTENERS")
listeners = [
{"handler": self.messages_handler, "topic": "basicmessages"},
{"topic": "issue_credential", "handler": self.cred_handler},
{"handler": self.connection_handler, "topic": "connections"},
{"topic": "present_proof", "handler": self.proof_handler},
]
self.agent_controller.register_listeners(listeners, defaults=True)
def cred_handler(self, payload: TypeDict) -> None:
connection_id = payload["connection_id"]
exchange_id = payload["credential_exchange_id"]
state = payload["state"]
role = payload["role"]
print("\n---------------------------------------------------\n")
print("Handle Issue Credential Webhook")
print(f"Connection ID : {connection_id}")
print(f"Credential exchange ID : {exchange_id}")
print("Agent Protocol Role : ", role)
print("Protocol State : ", state)
print("\n---------------------------------------------------\n")
print("Handle Credential Webhook Payload")
if state == "offer_received":
print("Credential Offer Recieved")
proposal = payload["credential_proposal_dict"]
print(
"The proposal dictionary is likely how you would understand and "
+ "display a credential offer in your application"
)
print("\n", proposal)
print("\n This includes the set of attributes you are being offered")
attributes = proposal["credential_proposal"]["attributes"]
print(attributes)
# YOUR LOGIC HERE
elif state == "request_sent":
print(
"\nA credential request object contains the commitment to the agents "
+ "master secret using the nonce from the offer"
)
# YOUR LOGIC HERE
elif state == "credential_received":
print("Received Credential")
# YOUR LOGIC HERE
elif state == "credential_acked":
# YOUR LOGIC HERE
credential = payload["credential"]
print("Credential Stored\n")
print(credential)
print(
"\nThe referent acts as the identifier for retrieving the raw credential from the wallet"
)
# Note: You would probably save this in your application database
credential_referent = credential["referent"]
print("Referent", credential_referent)
def connection_handler(self, payload: TypeDict) -> None:
state = payload["state"]
connection_id = payload["connection_id"]
their_role = payload["their_role"]
routing_state = payload["routing_state"]
print("----------------------------------------------------------")
print("Connection Webhook Event Received")
print("Connection ID : ", connection_id)
print("State : ", state)
print("Routing State : ", routing_state)
print("Their Role : ", their_role)
print("----------------------------------------------------------")
def proof_handler(self, payload: TypeDict) -> None:
role = payload["role"]
connection_id = payload["connection_id"]
pres_ex_id = payload["presentation_exchange_id"]
state = payload["state"]
loop = asyncio.get_event_loop()
print(
"\n---------------------------------------------------------------------\n"
)
print("Handle present-proof")
print("Connection ID : ", connection_id)
print("Presentation Exchange ID : ", pres_ex_id)
print("Protocol State : ", state)
print("Agent Role : ", role)
print("Initiator : ", payload["initiator"])
print(
"\n---------------------------------------------------------------------\n"
)
if state == "presentation_received":
verified_response = loop.run_until_complete(
self.agent_controller.proofs.verify_presentation(pres_ex_id)
)
if self.is_verified is not None:
self.is_verified.set_result(verified_response["verified"] == "true")
print("Attributes Presented")
for (name, val) in verified_response["presentation"]["requested_proof"][
"revealed_attrs"
].items():
# This is the actual data that you want. It's a little hidden
print("Attribute : ", val)
print("Raw Value : ", val["raw"])
else:
print("is_verified Future has not been created")
# Receive basic messages
def messages_handler(self, payload: TypeDict) -> None:
print("Handle Duet ID", payload["content"])
if self.responder_id is not None:
self.responder_id.set_result(payload["content"])
else:
print("responder_id Future has not been created")
# Used for other Aries connections. E.g. with an issuer
def receive_invitation(self, invitation: str) -> str:
# Receive Invitation
loop = asyncio.get_event_loop()
response = loop.run_until_complete(
self.agent_controller.connections.receive_invitation(invitation)
)
# Print out accepted Invite and Alice's connection ID
print("Connection", response)
return response["connection_id"]
def create_invitation(self) -> str:
# Create Invitation
loop = asyncio.get_event_loop()
invite = loop.run_until_complete(
self.agent_controller.connections.create_invitation()
)
# connection_id = invite["connection_id"]
invite_message = json.dumps(invite["invitation"])
return invite_message
def configure_challenge(self, proof_request: TypeDict) -> None:
self.proof_request = proof_request
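# Illustrative wiring of this exchanger into a Duet session (a sketch only; the
# controller arguments, the proof request and the `credential_exchanger` keyword
# are assumptions for this sketch, not taken from this file):
#
#     import syft as sy
#     agent_controller = AriesAgentController(admin_url="http://localhost:8021")
#     exchanger = AriesDuetTokenExchanger(agent_controller)
#     exchanger.configure_challenge(proof_request)  # optional authentication policy
#     duet = sy.launch_duet(credential_exchanger=exchanger)  # or sy.join_duet(...)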
| apache-2.0 | -5,194,744,145,813,313,000 | 38.798561 | 105 | 0.564895 | false | 4.44159 | false | false | false |
openSUSE/python-suseapi | suseapi/test_userinfo.py | 2 | 2759 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <mcihar@suse.cz>
#
# This file is part of python-suseapi
# <https://github.com/openSUSE/python-suseapi>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
'''
Testing of user information connector
'''
from unittest import TestCase
from mockldap import MockLdap
from suseapi.userinfo import UserInfo
def start_ldap_mock():
"""
Starts LDAP mocking.
"""
mockldap = MockLdap({
'o=Novell': {'o': 'Novell'},
'cn=mcihar,o=Novell': {
'mail': ['mcihar@suse.com'],
'ou': ['TestDept'],
'cn': ['mcihar'],
'uid': ['mcihar'],
},
'cn=foobar,o=Novell': {
'mail': ['foobar@suse.com'],
'ou': ['L3 Maintenance'],
'cn': ['foobar'],
'uid': ['foobar'],
},
})
mockldap.start()
return mockldap
class UserInfoTest(TestCase):
'''
User information tests.
'''
def test_department(self):
'''
Test department lookups.
'''
mockldap = start_ldap_mock()
try:
userinfo = UserInfo('ldap://ldap', 'o=novell')
# By mail with fixup
self.assertEqual(
'L3/Maintenance',
userinfo.get_department('foobar@novell.com')
)
# By UID
self.assertEqual(
'TestDept',
userinfo.get_department('mcihar')
)
# By UID from cache
self.assertEqual(
'TestDept',
userinfo.get_department('mcihar')
)
# By email
self.assertEqual(
'TestDept',
userinfo.get_department('mcihar@suse.com')
)
# Hardcoded entries
self.assertEqual(
'Security team',
userinfo.get_department('security-team@suse.de')
)
# Non existing entry
self.assertEqual(
'N/A',
userinfo.get_department('nobody')
)
finally:
mockldap.stop()
| gpl-3.0 | 8,879,091,268,890,904,000 | 27.412371 | 71 | 0.542816 | false | 3.965468 | true | false | false |
artefactual/archivematica-history | src/MCPClient/lib/clientScripts/getContentdmCollectionList.py | 1 | 1665 | #!/usr/bin/python -OO
# This file is part of Archivematica.
#
# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Mark Jordan <email>
# @version svn: $Id$
import sys
import json
import urllib
# The base URL will be specific to each CONTENTdm server; everything including and
# following 'dmwebservices' is the same.
try:
CollectionListUrl = 'http://%s/dmwebservices/index.php?q=dmGetCollectionList/json' % (sys.argv[1])
f = urllib.urlopen(CollectionListUrl)
collectionListString = f.read()
collectionList = json.loads(collectionListString)
except:
print "Cannot retrieve CONTENTdm collection list from " + sys.argv[1]
sys.exit(1)
# We only want two of the elements of each 'collection', alias and name.
cleanCollectionList = {}
for collection in collectionList:
for k, v in collection.iteritems():
cleanCollectionList[collection['name']] = collection['alias']
print(cleanCollectionList)
| agpl-3.0 | 8,629,844,122,104,608,000 | 35.195652 | 102 | 0.756156 | false | 3.741573 | false | false | false |
jbtule/keyczar | python/tests/keyczar_tests/keyczart_test.py | 9 | 4366 | #!/usr/bin/python
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Testcases to test behavior of Keyczart.
@author: arkajit.dey@gmail.com (Arkajit Dey)
"""
import unittest
from keyczar import readers
from keyczar import writers
from keyczar import keyczart
from keyczar import keyczar
from keyczar import keyinfo
class KeyczartTest(unittest.TestCase):
def setUp(self):
self.mock = readers.MockReader('TEST', keyinfo.ENCRYPT, keyinfo.AES)
self.mock.AddKey(42, keyinfo.PRIMARY)
self.mock.AddKey(77, keyinfo.ACTIVE)
self.mock.AddKey(99, keyinfo.INACTIVE)
keyczart.mock = self.mock # enable testing
def testCreate(self):
keyczart.main(['create', '--name=testCreate',
'--purpose=crypt', '--asymmetric=rsa'])
self.assertEquals('testCreate', self.mock.kmd.name)
self.assertEquals(keyinfo.DECRYPT_AND_ENCRYPT, self.mock.kmd.purpose)
self.assertEquals(keyinfo.RSA_PRIV, self.mock.kmd.type)
def testAddKey(self):
self.assertEquals(3, self.mock.numkeys)
keyczart.main(['addkey', '--status=primary'])
self.assertEquals(4, self.mock.numkeys)
# The next version number will be 100, since the previous max was 99
self.assertEquals(keyinfo.PRIMARY, self.mock.GetStatus(100))
self.assertEquals(keyinfo.ACTIVE, self.mock.GetStatus(42))
def testAddKeySizeFlag(self):
keyczart.main(['addkey', '--size=256'])
self.assertEquals(256, self.mock.GetKeySize(100))
def testAddKeyCrypterCreatesCrypter(self):
self.dummy_location = None
def dummyCreateCrypter(location):
self.dummy_location = location
return self.mock
keyczart._CreateCrypter = dummyCreateCrypter
keyczart.main(['addkey', '--crypter=foo'])
self.assertEquals(self.dummy_location, 'foo')
def testPubKey(self):
pubmock = readers.MockReader('PUBTEST', keyinfo.DECRYPT_AND_ENCRYPT,
keyinfo.RSA_PRIV)
pubmock.AddKey(33, keyinfo.PRIMARY, 1024) # small key size for fast tests
keyczart.mock = pubmock # use pubmock instead
self.assertEquals(None, pubmock.pubkmd)
keyczart.main(['pubkey'])
self.assertNotEqual(None, pubmock.pubkmd)
self.assertEquals('PUBTEST', pubmock.pubkmd.name)
self.assertEquals(keyinfo.ENCRYPT, pubmock.pubkmd.purpose)
self.assertEquals(keyinfo.RSA_PUB, pubmock.pubkmd.type)
self.assertTrue(pubmock.HasPubKey(33))
def testPromote(self):
keyczart.main(['promote', '--version=77'])
self.assertEquals(keyinfo.PRIMARY, self.mock.GetStatus(77))
self.assertEquals(keyinfo.ACTIVE, self.mock.GetStatus(42))
def testDemote(self):
keyczart.main(['demote', '--version=77'])
self.assertEquals(keyinfo.INACTIVE, self.mock.GetStatus(77))
def testRevoke(self):
self.assertTrue(self.mock.ExistsVersion(99))
keyczart.main(['revoke', '--version=99'])
self.assertFalse(self.mock.ExistsVersion(99))
def testWriteIsBackwardCompatible(self):
class MockWriter(writers.Writer):
num_created = 0
def WriteMetadata(self, metadata, overwrite=True):
return
def WriteKey(self, key, version_number, encrypter=None):
return
def Remove(self, version_number):
return
def Close(self):
return
@classmethod
def CreateWriter(cls, location):
MockWriter.num_created += 1
return MockWriter()
generic_keyczar = keyczar.GenericKeyczar(self.mock)
generic_keyczar.Write('foo')
self.assertEquals(1, MockWriter.num_created,
'Write("string") should have created a new writer')
def tearDown(self):
keyczart.mock = None
def suite():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(KeyczartTest))
return suite
if __name__ == "__main__":
unittest.main(defaultTest='suite')
| apache-2.0 | 3,661,573,578,239,872,500 | 32.328244 | 78 | 0.703848 | false | 3.495596 | true | false | false |
jonnybot0/emoji4unicode | src/update_e4u_from_nameslist.py | 6 | 3270 | #!/usr/bin/python2.4
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Update emoji4unicode.xml from a NamesList.txt file.
During the development of Unicode 6.0/ISO 10646 AMD8,
Emoji symbols may change code points, names, and annotations.
This script reads emoji4unicode.xml and a NamesList.txt file,
updates the XML data according to the NamesList,
and writes a modified XML file to ../generated/emoji4unicode.xml.
"""
__author__ = "Markus Scherer"
import os.path
import xml.dom.minidom
import emoji4unicode
import nameslist
def main():
here = os.path.dirname(__file__)
e4u_filename = os.path.join(here, "..", "data", "emoji4unicode.xml")
doc = xml.dom.minidom.parse(e4u_filename)
root = doc.documentElement
id_to_symbol = {}
for symbol in root.getElementsByTagName("e"):
id_to_symbol[symbol.getAttribute("id")] = symbol
nameslist_filename = os.path.join(here, "..", "data",
"unicode", "uc60-a-FDAM8-SanJose.lst")
for record in nameslist.Read(nameslist_filename):
if "uni" not in record:
continue
id = nameslist.GetEmojiID(record)
if not id:
continue
# Extract the old data from the emoji4unicode.xml <e> symbol element.
symbol = id_to_symbol[id]
old_uni = symbol.getAttribute("unicode")
old_name = symbol.getAttribute("name")
old_annotations = []
for element in symbol.getElementsByTagName("ann"):
old_annotations.append(element.firstChild.nodeValue.strip())
# Extract the new data from the NamesList record.
new_uni = record["uni"]
new_name = record["name"]
new_annotations = record["data"]
# Update the proposed Unicode code point.
if old_uni and not old_uni.startswith("+"):
print ("*** e-%s: setting proposed code point %s but " +
"old %s was not proposed" %
(id, new_uni, old_uni))
symbol.setAttribute("unicode", u"+" + new_uni)
# Update the proposed character name.
# Keep the previous name in an oldname attribute.
if old_name == new_name:
if symbol.getAttribute("oldname"):
symbol.removeAttribute("oldname")
else:
symbol.setAttribute("oldname", old_name)
symbol.setAttribute("name", new_name)
# Append new annotations.
for ann in new_annotations:
# Skip the Emoji symbol ID alias, and annotations that are not new.
if not ann.startswith(u"= e-") and ann not in old_annotations:
ann_element = doc.createElement("ann")
ann_element.appendChild(doc.createTextNode(ann))
symbol.appendChild(ann_element)
out_filename = os.path.join(here, "..", "generated", "emoji4unicode.xml")
emoji4unicode.Write(doc, out_filename)
doc.unlink()
if __name__ == "__main__":
main()
| apache-2.0 | -7,178,315,848,665,647,000 | 36.586207 | 75 | 0.68318 | false | 3.699095 | false | false | false |
korealerts1/sentry | src/sentry/api/serializers/models/project.py | 10 | 1618 | from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMemberType, Project, Team
@register(Project)
class ProjectSerializer(Serializer):
def get_attrs(self, item_list, user):
organization = item_list[0].team.organization
team_map = dict(
(t.id, t) for t in Team.objects.get_for_user(
organization=organization,
user=user,
)
)
result = {}
for project in item_list:
try:
team = team_map[project.team_id]
except KeyError:
access_type = None
else:
access_type = team.access_type
result[project] = {
'access_type': access_type,
}
return result
def serialize(self, obj, attrs, user):
from sentry import features
feature_list = []
if features.has('projects:quotas', obj, actor=user):
feature_list.append('quotas')
if features.has('projects:user-reports', obj, actor=user):
feature_list.append('user-reports')
return {
'id': str(obj.id),
'slug': obj.slug,
'name': obj.name,
'isPublic': obj.public,
'dateCreated': obj.date_added,
'features': feature_list,
'permission': {
'owner': attrs['access_type'] <= OrganizationMemberType.OWNER,
'admin': attrs['access_type'] <= OrganizationMemberType.ADMIN,
},
}
| bsd-3-clause | -8,629,694,404,805,901,000 | 28.962963 | 78 | 0.537701 | false | 4.420765 | false | false | false |
honnibal/spaCy | spacy/lang/el/get_pos_from_wiktionary.py | 1 | 2143 | # coding: utf8
from __future__ import unicode_literals
def get_pos_from_wiktionary():
import re
from gensim.corpora.wikicorpus import extract_pages
regex = re.compile(r"==={{(\w+)\|el}}===")
regex2 = re.compile(r"==={{(\w+ \w+)\|el}}===")
# get words based on the Wiktionary dump
# check only for specific parts
# ==={{κύριο όνομα|el}}===
expected_parts = [
"μετοχή",
"ρήμα",
"επίθετο",
"επίρρημα",
"ουσιαστικό",
"κύριο όνομα",
"άρθρο",
]
wiktionary_file_path = (
"/data/gsoc2018-spacy/spacy/lang/el/res/elwiktionary-latest-pages-articles.xml"
)
proper_names_dict = {
"ουσιαστικό": "nouns",
"επίθετο": "adjectives",
"άρθρο": "dets",
"επίρρημα": "adverbs",
"κύριο όνομα": "proper_names",
"μετοχή": "participles",
"ρήμα": "verbs",
}
expected_parts_dict = {}
for expected_part in expected_parts:
expected_parts_dict[expected_part] = []
for title, text, pageid in extract_pages(wiktionary_file_path):
if text.startswith("#REDIRECT"):
continue
title = title.lower()
all_regex = regex.findall(text)
all_regex.extend(regex2.findall(text))
for a in all_regex:
if a in expected_parts:
expected_parts_dict[a].append(title)
for i in expected_parts_dict:
with open("_{0}.py".format(proper_names_dict[i]), "w") as f:
f.write("from __future__ import unicode_literals\n")
f.write('{} = set("""\n'.format(proper_names_dict[i].upper()))
words = sorted(expected_parts_dict[i])
line = ""
to_write = []
for word in words:
if len(line + " " + word) > 79:
to_write.append(line)
line = ""
else:
line = line + " " + word
f.write("\n".join(to_write))
f.write('\n""".split())')
| mit | 4,177,426,554,848,515,600 | 29.343284 | 87 | 0.515494 | false | 3.002954 | false | false | false |
projectbuendia/server-status | lcd/report_lines_and_battery.py | 1 | 1426 | #!/usr/bin/env python
import Adafruit_Nokia_LCD as LCD
import Adafruit_GPIO.SPI as SPI
from PIL import Image
from PIL import ImageFont
from PIL import ImageDraw
import sys
from re import sub
import os
os.chdir('/home/root/gpio')
# Edison software SPI config:
SCLK = 35 # 10
DIN = 26 # 11
DC = 25 # 32
RST = 45 # 46
CS = 31 # 23
disp = LCD.PCD8544(DC, RST, SCLK, DIN, CS)
with open('/home/root/gpio/contrast.txt', "r") as f:
contrast = int(sub('\\n', '', f.read()))
disp.begin(contrast = contrast)
disp.clear()
font = ImageFont.truetype('/home/root/gpio/fonts/Minecraftia-Regular.ttf', 8)
#font = ImageFont.load_default()
# new line positions
h = [-2,6,14,22,30,38]
# initialise
image = Image.new('1', (LCD.LCDWIDTH, LCD.LCDHEIGHT))
draw = ImageDraw.Draw(image)
draw.rectangle((0,0,LCD.LCDWIDTH,LCD.LCDHEIGHT), outline=255, fill=255)
# report lines
n = min(5, len(sys.argv)-1)
for i in range(n):
draw.text((0,h[i]+0), sys.argv[i+1], font=font)
# Battery bar
ind = 9
Y = 40
with open('battery_charge.txt', "r") as f:
charge_val = int(f.read())
charge = int(50 * (float(charge_val) / 100))
draw.polygon([(0,1+Y), (2,1+Y), (2,0+Y), (4,0+Y), (4,1+Y), (6,1+Y), (6,7+Y), (0,7+Y)], outline=0, fill=255)
draw.text((61,Y-1), str(charge_val) + '%', font=font)
draw.rectangle((ind,Y+1,ind+50,7+Y), outline=0, fill=255)
draw.rectangle((ind,Y+1,ind+charge,7+Y), outline=0, fill=0)
disp.image(image)
disp.display()
| apache-2.0 | 5,995,362,404,635,405,000 | 23.169492 | 107 | 0.657083 | false | 2.376667 | false | false | false |
wayn3/finite_size_correction | cube.py | 1 | 3476 | #!/usr/bin/env python
'''planar-average of a cube file
wei.chen@epfl.ch'''
import numpy as np
import sys
class Readcube:
def __init__(self, filename, zdef=0):
try:
f = open(filename)
except IOError:
sys.exit('File not found.')
[f.readline() for i in xrange(2)]
# na: number of atoms
na = int(f.readline().split()[0])
# ng: 3D grid points, ns: spacing vector
ng = np.array([[0,0,0]])
ns = np.zeros((3,3))
for i in xrange(3):
s = f.readline().split()
ng[:,i] = int(s[0])
ns[i] = float(s[1]), float(s[2]), float(s[3])
# read the positions
pos = np.zeros((na,3))
for i in xrange(na):
s = f.readline().split()
pos[i,:] = s[2:]
# real space lattice vector
rvec = ns*ng.T
dat = str2array(f.readlines())
f.close()
# comply to the cube format
dat = dat.reshape(ng[0,:])
self.na = na
self.ng = ng
self.ns = ns
self.dat = dat
self.rvec = rvec
self.pos = pos
self.zdef = zdef
def str2array(str):
return np.fromstring(''.join(str), sep=' ')
class Readpot:
def __init__(self, filename):
try:
f = open(filename)
except IOError:
sys.exit('File not found.')
f.readline()
head = f.readline().split()
ng = [0,0,0]
ng[0], ng[1], ng[2] = int(head[0]), int(head[1]), int(head[2])
na = int(head[6])
ntype = int(head[7]) # number of atom types
head = f.readline().split()
scale = float(head[1])
# rvec: real-space lattice vector
rvec = np.zeros([3,3])
for i in xrange(3):
s = f.readline().split()
rvec[i,:] = float(s[0]), float(s[1]), float(s[2])
rvec *= scale
[f.readline() for i in xrange(ntype+1)]
# direct coordinates
pos = np.zeros((na,3))
for i in xrange(na):
s = f.readline().split()
pos[i,:] = s[1:4]
dat = f.readlines()
f.close()
self.dat = dat
self.na = na
self.ng = ng
self.rvec = rvec
self.pos = pos
class Shift1d:
def __init__(self, z, y, zdef):
N = z.size
Z = z.max()-z.min()
# fractional zdef
fzdef = zdef/Z
# fftshift
# g: reciprocal vector
g = np.fft.fftfreq(N)*N
yg = np.fft.fft(y)
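        # Fourier shift theorem: multiplying the spectrum by exp(-2*pi*i*g*d)
        # shifts the signal by the fraction d of the cell length; with
        # d = 0.5 - fzdef the defect plane ends up at the centre of the cell.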
ygs = yg*np.exp(-2j*np.pi*g*(0.5-fzdef))
ys = np.fft.ifft(ygs)
# centering the defect
self.zs = np.mgrid[-0.5:0.5:1./N]*Z
self.fzdef = fzdef
self.g = g
self.ys = ys
if __name__ == "__main__":
cube = Readcube(sys.argv[1], float(sys.argv[2]))
ngrid = cube.ng[0]
# print number of atoms, and fft grid
print(cube.na, ngrid)
dir = 2 # 0->x, 1->y, 2->z
avg_1d = np.zeros(ngrid[dir])
for i in xrange(ngrid[dir]):
avg_1d[i] = np.average(cube.dat[:,:,i])
zlen = np.linalg.norm(cube.rvec[dir,:])
z = np.linspace(0, zlen, ngrid[dir],endpoint=False)
if float(sys.argv[2]) == 0:
dump = zip(z, avg_1d)
else:
shift = Shift1d(z, avg_1d, cube.zdef)
dump = zip(z, shift.ys.real)
np.savetxt(sys.argv[1].rsplit(".")[0]+"_1d.dat",dump)
avg = np.average(avg_1d)
print(avg)
| gpl-2.0 | 7,067,332,705,653,154,000 | 25.333333 | 70 | 0.481876 | false | 3.09528 | false | false | false |
googleapis/python-aiplatform | samples/snippets/export_model_sample_test.py | 1 | 1390 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pytest
import export_model_sample
PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT")
MODEL_ID = (
"3422489426196955136" # permanent_swim_run_videos_action_recognition_edge_model
)
GCS_URI = (
"gs://ucaip-samples-test-output/tmp/export_model_sample"
)
@pytest.fixture(scope="function", autouse=True)
def teardown(storage_client):
yield
bucket = storage_client.get_bucket("ucaip-samples-test-output")
blobs = bucket.list_blobs(prefix="tmp/export_model_sample")
for blob in blobs:
blob.delete()
def test_export_model_sample(capsys):
export_model_sample.export_model_sample(
project=PROJECT_ID,
model_id=MODEL_ID,
gcs_destination_output_uri_prefix=GCS_URI
)
out, _ = capsys.readouterr()
assert "output_info" in out
| apache-2.0 | -1,274,198,760,584,094,200 | 28.574468 | 84 | 0.722302 | false | 3.457711 | false | false | false |
uni-peter-zheng/virt-test | virttest/utils_misc_unittest.py | 14 | 7598 | #!/usr/bin/python
import os
import tempfile
import unittest
import common
from autotest.client import utils
from autotest.client.shared.test_utils import mock
import utils_misc
import cartesian_config
import build_helper
class TestUtilsMisc(unittest.TestCase):
def test_cpu_vendor_intel(self):
cpu_info = """processor : 0
vendor_id : GenuineIntel
cpu family : 6
model : 58
model name : Intel(R) Core(TM) i7-3770 CPU @ 3.40GHz
"""
vendor = utils_misc.get_cpu_vendor(cpu_info, False)
self.assertEqual(vendor, 'GenuineIntel')
def test_cpu_vendor_amd(self):
cpu_info = """processor : 3
vendor_id : AuthenticAMD
cpu family : 21
model : 16
model name : AMD A10-5800K APU with Radeon(tm) HD Graphics
"""
vendor = utils_misc.get_cpu_vendor(cpu_info, False)
self.assertEqual(vendor, 'AuthenticAMD')
def test_vendor_unknown(self):
cpu_info = "this is an unknown cpu"
vendor = utils_misc.get_cpu_vendor(cpu_info, False)
self.assertEqual(vendor, 'unknown')
def test_get_archive_tarball_name(self):
tarball_name = utils_misc.get_archive_tarball_name('/tmp',
'tmp-archive',
'bz2')
self.assertEqual(tarball_name, 'tmp-archive.tar.bz2')
def test_get_archive_tarball_name_absolute(self):
tarball_name = utils_misc.get_archive_tarball_name('/tmp',
'/var/tmp/tmp',
'bz2')
self.assertEqual(tarball_name, '/var/tmp/tmp.tar.bz2')
def test_get_archive_tarball_name_from_dir(self):
tarball_name = utils_misc.get_archive_tarball_name('/tmp',
None,
'bz2')
self.assertEqual(tarball_name, 'tmp.tar.bz2')
def test_git_repo_param_helper(self):
config = """git_repo_foo_uri = git://git.foo.org/foo.git
git_repo_foo_branch = next
git_repo_foo_lbranch = local
git_repo_foo_commit = bc732ad8b2ed8be52160b893735417b43a1e91a8
"""
config_parser = cartesian_config.Parser()
config_parser.parse_string(config)
params = config_parser.get_dicts().next()
h = build_helper.GitRepoParamHelper(params, 'foo', '/tmp/foo')
self.assertEqual(h.name, 'foo')
self.assertEqual(h.branch, 'next')
self.assertEqual(h.lbranch, 'local')
self.assertEqual(h.commit, 'bc732ad8b2ed8be52160b893735417b43a1e91a8')
def test_normalize_data_size(self):
n1 = utils_misc.normalize_data_size("12M")
n2 = utils_misc.normalize_data_size("1024M", "G")
n3 = utils_misc.normalize_data_size("1024M", "T")
n4 = utils_misc.normalize_data_size("1000M", "G", 1000)
n5 = utils_misc.normalize_data_size("1T", "G", 1000)
n6 = utils_misc.normalize_data_size("1T", "M")
self.assertEqual(n1, "12.0")
self.assertEqual(n2, "1.0")
self.assertEqual(n3, "0.0009765625")
self.assertEqual(n4, "1.0")
self.assertEqual(n5, "1000.0")
self.assertEqual(n6, "1048576.0")
class FakeCmd(object):
def __init__(self, cmd):
self.fake_cmds = [
{"cmd": "numactl --hardware",
"stdout": """
available: 1 nodes (0)
node 0 cpus: 0 1 2 3 4 5 6 7
node 0 size: 18431 MB
node 0 free: 17186 MB
node distances:
node 0
0: 10
"""},
{"cmd": "ps -eLf | awk '{print $4}'",
"stdout": """
1230
1231
1232
1233
1234
1235
1236
1237
"""},
{"cmd": "taskset -p 0x1 1230", "stdout": ""},
{"cmd": "taskset -p 0x2 1231", "stdout": ""},
{"cmd": "taskset -p 0x4 1232", "stdout": ""},
{"cmd": "taskset -p 0x8 1233", "stdout": ""},
{"cmd": "taskset -p 0x10 1234", "stdout": ""},
{"cmd": "taskset -p 0x20 1235", "stdout": ""},
{"cmd": "taskset -p 0x40 1236", "stdout": ""},
{"cmd": "taskset -p 0x80 1237", "stdout": ""},
]
self.stdout = self.get_stdout(cmd)
def get_stdout(self, cmd):
for fake_cmd in self.fake_cmds:
if fake_cmd['cmd'] == cmd:
return fake_cmd['stdout']
raise ValueError("Could not locate locate '%s' on fake cmd db" % cmd)
def utils_run(cmd):
return FakeCmd(cmd)
all_nodes_contents = "0\n"
online_nodes_contents = "0\n"
class TestNumaNode(unittest.TestCase):
def setUp(self):
self.god = mock.mock_god(ut=self)
self.god.stub_with(utils, 'run', utils_run)
all_nodes = tempfile.NamedTemporaryFile(delete=False)
all_nodes.write(all_nodes_contents)
all_nodes.close()
online_nodes = tempfile.NamedTemporaryFile(delete=False)
online_nodes.write(online_nodes_contents)
online_nodes.close()
self.all_nodes_path = all_nodes.name
self.online_nodes_path = online_nodes.name
self.numa_node = utils_misc.NumaNode(-1,
self.all_nodes_path,
self.online_nodes_path)
def test_get_node_cpus(self):
self.assertEqual(self.numa_node.get_node_cpus(0), '0 1 2 3 4 5 6 7')
def test_pin_cpu(self):
self.assertEqual(self.numa_node.pin_cpu("1230"), "0")
self.assertEqual(self.numa_node.dict["0"], ["1230"])
self.assertEqual(self.numa_node.pin_cpu("1231"), "1")
self.assertEqual(self.numa_node.dict["1"], ["1231"])
self.assertEqual(self.numa_node.pin_cpu("1232"), "2")
self.assertEqual(self.numa_node.dict["2"], ["1232"])
self.assertEqual(self.numa_node.pin_cpu("1233"), "3")
self.assertEqual(self.numa_node.dict["3"], ["1233"])
self.assertEqual(self.numa_node.pin_cpu("1234"), "4")
self.assertEqual(self.numa_node.dict["4"], ["1234"])
self.assertEqual(self.numa_node.pin_cpu("1235"), "5")
self.assertEqual(self.numa_node.dict["5"], ["1235"])
self.assertEqual(self.numa_node.pin_cpu("1236"), "6")
self.assertEqual(self.numa_node.dict["6"], ["1236"])
self.assertEqual(self.numa_node.pin_cpu("1237"), "7")
self.assertEqual(self.numa_node.dict["7"], ["1237"])
self.assertTrue("free" not in self.numa_node.dict.values())
def test_free_cpu(self):
self.assertEqual(self.numa_node.pin_cpu("1230"), "0")
self.assertEqual(self.numa_node.dict["0"], ["1230"])
self.assertEqual(self.numa_node.pin_cpu("1231"), "1")
self.assertEqual(self.numa_node.dict["1"], ["1231"])
self.numa_node.free_cpu("0")
self.assertEqual(self.numa_node.dict["0"], [])
self.assertEqual(self.numa_node.dict["1"], ["1231"])
def test_bitlist_to_string(self):
string = 'foo'
bitlist = [0, 1, 1, 0, 0, 1, 1, 0, 0, 1,
1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1]
self.assertEqual(utils_misc.string_to_bitlist(string), bitlist)
def test_string_to_bitlist(self):
bitlist = [0, 1, 1, 0, 0, 0, 1, 0, 0, 1,
1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0]
string = 'bar'
self.assertEqual(utils_misc.bitlist_to_string(bitlist), string)
def tearDown(self):
self.god.unstub_all()
os.unlink(self.all_nodes_path)
os.unlink(self.online_nodes_path)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -8,474,073,198,443,555,000 | 33.536364 | 78 | 0.562122 | false | 3.263746 | true | false | false |
Pikecillo/genna | external/4Suite-XML-1.0.2/Ft/Lib/Random.py | 1 | 11290 | ########################################################################
# $Header: /var/local/cvsroot/4Suite/Ft/Lib/Random.py,v 1.8 2006/01/13 06:12:55 mbrown Exp $
"""
Thread-safe random number generation
Random number generation capabilities, speed, and thread safety in
stdlib vary from version to version of Python. In addition, attempts to
use an OS-specific random number source can result in unexpected
exceptions being raised. Also, a bug in Python 2.3.0 can lead to a
reduction in entropy, and a bug in Python 2.4.0 and 2.4.1 can result
in exceptions related to open filehandles on some multithreaded Posix
platforms.
This module works around as many of these issues as it can by defining
random number generator classes that can be used safely by multiple
threads, using the best random number sources available. They support
all versions of Python from 2.1 up, and fall back on more reliable
generators when exception conditions occur. In addition, convenience
functions equivalent to random.random() and os.urandom() are exposed.
Copyright 2006 Fourthought, Inc. (USA).
Detailed license and copyright information: http://4suite.org/COPYRIGHT
Project home, documentation, distributions: http://4suite.org/
"""
__all__ = ['urandom', 'FtRandom', 'FtSystemRandom', 'DEFAULT_RNG',
'Random', 'GetRandomBytes']
import random, threading, os, sys
from sys import version_info
py230 = version_info[0:3] == (2, 3, 0)
py23up = version_info[0:2] > (2, 2)
py24up = version_info[0:2] > (2, 3)
py242up = version_info[0:3] > (2, 4, 1)
posix = os.name == 'posix'
win32 = sys.platform == 'win32'
_lock = threading.Lock()
#=============================================================================
# Thread-safe implementation of os.urandom()
# (still raises NotImplementedError when no OS-specific random number source)
#
if win32 and py24up:
urandom = os.urandom
elif posix:
if py242up:
urandom = os.urandom
else:
# Python 2.4.2's os.urandom()
def urandom(n):
"""urandom(n) -> str
Return a string of n random bytes suitable for cryptographic use.
"""
try:
_urandomfd = os.open("/dev/urandom", os.O_RDONLY)
except:
raise NotImplementedError("/dev/urandom (or equivalent) not found")
bytes = ""
while len(bytes) < n:
bytes += os.read(_urandomfd, n - len(bytes))
os.close(_urandomfd)
return bytes
if hasattr(random, '_urandom'):
random._urandom = urandom
else:
def urandom(n):
"""urandom(n) -> str
Return a string of n random bytes suitable for cryptographic use.
"""
raise NotImplementedError("There is no OS-specific random number source.")
#=============================================================================
# FtRandom: a non-crypto-safe PRNG (Mersenne Twister or Wichmann-Hill, made
# thread-safe). By default, seeded from an OS-specific random number source,
# if available.
#
if posix and not py24up:
# posix py2.3 down: use urandom if possible
from binascii import hexlify
def _best_seed(self, a=None):
"""Initialize internal state from hashable object.
None or no argument seeds from current time or from an operating
system specific randomness source if available.
If a is not None or an int or long, hash(a) is used instead.
"""
if a is None:
try:
a = long(hexlify(urandom(16)), 16)
except NotImplementedError:
# posix py2.3.0: use system clock, but avoid buggy stdlib
if py230:
import time
a = long(time.time() * 256)
super(FtRandom, self).seed(a)
elif py230:
# win32 py2.3.0: use system clock, but avoid buggy stdlib
def _best_seed(self, a=None):
import time
a = long(time.time() * 256)
super(FtRandom, self).seed(a)
else:
# posix or win32 py2.4 up: urandom if possible, fall back on system clock
# win32 py2.3 down: system clock only
_best_seed = random.Random.seed
# random.Random.gauss() is not thread-safe
def _gauss(self, *args, **kwargs):
"""Gaussian distribution.
mu is the mean, and sigma is the standard deviation.
Thread-safe.
"""
_lock.acquire()
rv = super(self.__class__, self).gauss(*args, **kwargs)
_lock.release()
return rv
if py23up:
# Mersenne Twister, already thread-safe
_random = random.Random.random
def _getrandbytes(self, k):
"""getrandbytes(k) -> x. Returns k random bytes as a str."""
bytes = ""
while len(bytes) < k:
n = super(FtRandom, self).random()
bytes += chr(int(n * 256))
return bytes
else:
# Wichmann-Hill, made thread-safe
def _random(self):
"""Get the next random number in the range [0.0, 1.0)."""
_lock.acquire()
n = super(FtRandom, self).random()
_lock.release()
return n
def _getrandbytes(self, k):
"""getrandbytes(k) -> x. Returns k random bytes as a str."""
bytes = ""
_lock.acquire()
while len(bytes) < k:
n = super(FtRandom, self).random()
bytes += chr(int(n * 256))
_lock.release()
return bytes
if py24up:
_getrandbits = random.Random.getrandbits
else:
# This is the py2.4 implementation
from binascii import hexlify
def _getrandbits(self, k):
"""getrandbits(k) -> x. Generates a long int with k random bits."""
if k <= 0:
raise ValueError('number of bits must be greater than zero')
if k != int(k):
raise TypeError('number of bits should be an integer')
bytes = (k + 7) // 8 # bits / 8 and rounded up
x = long(hexlify(self.getrandbytes(bytes)), 16)
return x >> (bytes * 8 - k) # trim excess bits
class FtRandom(random.Random, object):
"""
The best available OS-agnostic PRNG, thread-safe.
Implements getrandbits() in all versions of Python.
Also adds getrandbytes(), which returns a str of bytes.
"""
seed = _best_seed
gauss = _gauss
random = _random
getrandbits = _getrandbits
getrandbytes = _getrandbytes
def __init__(self, *args, **kwargs):
return super(FtRandom, self).__init__(*args, **kwargs)
#=============================================================================
# FtSystemRandom: a PRNG that uses an OS-specific random number source, if
# available, falling back on an instance of FtRandom. It is as crypto-safe as
# the OS-specific random number source, when such a source is available.
# Calls to seed() and jumpahead() only affect the fallback FtRandom instance.
#
if win32 and not py24up:
# don't bother trying OS-specific sources on win32 before py2.4
def _random(self):
"""Get the next random number in the range [0.0, 1.0)."""
return self._fallback_prng.random()
def _getrandbits(self, k):
"""getrandbits(k) -> x. Generates a long int with k random bits."""
return self._fallback_prng.getrandbits(k)
def _getrandbytes(self, k):
"""getrandbytes(k) -> x. Returns k random bytes as a str."""
return self._fallback_prng.getrandbytes(k)
else:
# Functions that read random numbers from OS-specific sources
# Use random() and getrandbits() from random.SystemRandom.
# We've already replaced random._urandom with our urandom, so it's OK.
try:
# py2.4 up...
from random import SystemRandom as _SystemRandom
_sr_random = _SystemRandom.random.im_func
_sr_getrandbits = _SystemRandom.getrandbits.im_func
except ImportError:
# py2.3 down, posix (since we tested for win32 above)...
# These are based on the py2.4 implementation.
from binascii import hexlify
_BPF = 53 # Number of bits in a float
_RECIP_BPF = 2**-_BPF
def _sr_random(self):
"""Get the next random number in the range [0.0, 1.0)."""
return (long(hexlify(urandom(7)), 16) >> 3) * _RECIP_BPF
def _sr_getrandbits(self, k):
"""getrandbits(k) -> x. Generates a long int with k random bits."""
if k <= 0:
raise ValueError('number of bits must be greater than zero')
if k != int(k):
raise TypeError('number of bits should be an integer')
bytes = (k + 7) // 8 # bits / 8 and rounded up
x = long(hexlify(urandom(bytes)), 16)
return x >> (bytes * 8 - k) # trim excess bits
# Wrapper functions that try OS-specific sources first, then fall back
def _random(self):
"""Get the next random number in the range [0.0, 1.0)."""
try:
return _sr_random(self)
except NotImplementedError:
return self._fallback_prng.random()
def _getrandbits(self, *args, **kwargs):
"""getrandbits(k) -> x. Generates a long int with k random bits."""
try:
return _sr_getrandbits(self, *args, **kwargs)
except NotImplementedError:
return self._fallback_prng.getrandbits(*args, **kwargs)
def _getrandbytes(self, k):
"""getrandbytes(k) -> x. Returns k random bytes as a str."""
try:
return urandom(k)
except NotImplementedError:
return self._fallback_prng.getrandbytes(k)
class FtSystemRandom(FtRandom):
"""
A PRNG that uses an OS-specific random number source, if
available, falling back on an instance of FtRandom.
Calls to seed(), jumpahead(), getstate() and setstate() only affect
the fallback FtRandom instance.
Implements getrandbits() in all versions of Python.
Also adds getrandbytes(), which returns a str of bytes.
"""
random = _random
getrandbits = _getrandbits
getrandbytes = _getrandbytes
def __init__(self, *args, **kwargs):
self._fallback_prng = FtRandom()
return super(FtSystemRandom, self).__init__(*args, **kwargs)
def seed(self, *args, **kwargs):
"""Seed the fallback PRNG (an instance of FtRandom)"""
return self._fallback_prng.seed(*args, **kwargs)
def jumpahead(self, *args, **kwargs):
"""Make the fallback PRNG (an instance of FtRandom) jump ahead"""
return self._fallback_prng.jumpahead(*args, **kwargs)
def getstate(self):
"""Return internal state; can be passed to setstate() later."""
return self._fallback_prng.getstate()
def setstate(self, state):
"""Restore internal state from object returned by getstate()."""
self._fallback_prng.setstate(state)
return
#=============================================================================
# convenience functions
#
DEFAULT_RNG = FtSystemRandom()
def Random():
"""Returns a random float, n, where 0 <= n < 1"""
return DEFAULT_RNG.random()
def GetRandomBytes(numBytes):
"""
Returns a string of random bytes from the best RNG available.
Equivalent to os.urandom(), but failsafe.
"""
return DEFAULT_RNG.getrandbytes(numBytes)
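# A minimal, illustrative self-test (a sketch, not part of the original module);
# it only exercises the convenience functions and the default generator above.
if __name__ == '__main__':
    print 'random float : %r' % Random()
    print 'random bytes : %r' % GetRandomBytes(8)
    print '128 random bits : %r' % DEFAULT_RNG.getrandbits(128)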
| gpl-2.0 | 5,651,266,429,356,690,000 | 37.141892 | 92 | 0.601683 | false | 3.89579 | false | false | false |
gnowledge/ncert_nroer | gstudio/gnowql.py | 3 | 2589 | # Copyright (c) 2011, 2012 Free Software Foundation
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gstudio.models import *
from objectapp.models import *
from reversion.models import Version
MAP = (
('objecttype','Objecttype'),
('gbobject', 'Gbobject')
)
def get_slug(name):
"""
returns the uri of the node.
"""
try:
"""
        note: it's best to use only the meta information given for Version data,
individual object information is best retrieved from the main table.
# Also Version.object.meta contains a lot of valuable information about the model.
"""
node = NID.objects.get(title=str(name))
# Retrieving only the relevant tupleset for the versioned objects
vrs = Version.objects.filter(type=0 , object_id=node.id)
vrs = vrs[0]
except:
return "The item was not found."
return vrs.object.get_absolute_url()
def get_nodetype(name):
"""
returns the model the id belongs to.
"""
try:
"""
ALGO: get object id, go to version model, return for the given id.
"""
node = NID.objects.get(title=str(name))
# Retrieving only the relevant tupleset for the versioned objects
vrs = Version.objects.filter(type=0 , object_id=node.id)
# Returned value is a list, so splice it .
vrs = vrs[0]
except:
return "The item was not found."
return vrs.object._meta.module_name
def get_node(name):
"""
returns a reference to the model object
"""
nt = get_nodetype(name)
node = NID.objects.get(title=str(name))
this_id = node.id
if (nt == 'gbobject'):
return Gbobject.objects.get(id=this_id)
if (nt == 'objecttype'):
return Objecttype.objects.get(id=this_id)
if (nt == 'metatype'):
return Metatype.objects.get(id=this_id)
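# Illustrative usage from a configured Django shell (a sketch; the node title
# "water" is hypothetical):
#
#     get_nodetype("water")     # -> e.g. 'gbobject'
#     node = get_node("water")  # -> the matching Gbobject/Objecttype instance
#     get_slug("water")         # -> that node's absolute URL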
| agpl-3.0 | -4,225,464,799,175,490,600 | 29.458824 | 90 | 0.634608 | false | 3.875749 | false | false | false |
westernx/sgpublish | sgpublish/mayatools/publish_scene.py | 1 | 5484 | from __future__ import absolute_import
import traceback
import time
import sys
import subprocess
import platform
import tempfile
import os
import re
import glob
import functools
import datetime
import itertools
from concurrent.futures import ThreadPoolExecutor
from PyQt4 import QtCore, QtGui
Qt = QtCore.Qt
from maya import cmds
from sgfs import SGFS
from sgpublish import uiutils as ui_utils
from sgpublish import utils
from sgpublish.exporter import maya as io_maya
from sgpublish.exporter.ui.publish import maya as ui_publish
from sgpublish.exporter.ui.publish.generic import PublishSafetyError
def basename(src_path=None):
basename = os.path.basename(src_path or cmds.file(q=True, sceneName=True) or 'untitled')
basename = os.path.splitext(basename)[0]
basename = re.sub(r'_*[rv]\d+', '', basename)
return basename
class SceneExporter(io_maya.Exporter):
def __init__(self, **kwargs):
kwargs.setdefault('filename_hint', basename())
kwargs.setdefault('publish_type', 'maya_scene')
super(SceneExporter, self).__init__(**kwargs)
def export_publish(self, publisher, **kwargs):
# Save the file into the directory.
src_path = cmds.file(q=True, sceneName=True)
src_ext = os.path.splitext(src_path)[1]
try:
dst_path = os.path.join(publisher.directory, os.path.basename(src_path))
maya_type = 'mayaBinary' if src_ext == '.mb' else 'mayaAscii'
cmds.file(rename=dst_path)
cmds.file(save=True, type=maya_type)
finally:
cmds.file(rename=src_path)
# Set the primary path.
publisher.path = dst_path
class PublishWidget(ui_publish.Widget):
def safety_check(self, **kwargs):
if not super(PublishWidget, self).safety_check(**kwargs):
return False
# Make sure they want to proceed if there are changes to the file.
if cmds.file(q=True, modified=True):
res = QtGui.QMessageBox.warning(self,
"Unsaved Changes",
"Would you like to save your changes before publishing this"
" file? The publish will have the changes either way.",
QtGui.QMessageBox.Save | QtGui.QMessageBox.No | QtGui.QMessageBox.Cancel,
QtGui.QMessageBox.Save
)
if res & QtGui.QMessageBox.Cancel:
return False
if res & QtGui.QMessageBox.Save:
cmds.file(save=True)
return True
class Dialog(QtGui.QDialog):
def __init__(self, exceptions=None):
super(Dialog, self).__init__()
self._setup_ui()
def _setup_ui(self):
self.setWindowTitle('Scene Publisher')
self.setLayout(QtGui.QVBoxLayout())
self._exporter = SceneExporter()
self._publish_widget = PublishWidget(self._exporter)
self._publish_widget.layout().setContentsMargins(0, 0, 0, 0)
self.layout().addWidget(self._publish_widget)
self._publish_widget.beforeScreenshot.connect(self.hide)
self._publish_widget.afterScreenshot.connect(self.show)
button = QtGui.QPushButton('Publish')
button.clicked.connect(self._on_submit)
self.layout().addLayout(ui_utils.vbox(button))
self._publish_widget.beforePlayblast.connect(self._before_playblast)
self._publish_widget.afterPlayblast.connect(self._after_playblast)
self._msgbox = None
def _before_playblast(self):
self.hide()
def _after_playblast(self):
self.show()
def _on_submit(self, *args):
# DO IT.
# This runs the safety check.
try:
publisher = self._publish_widget.export()
except PublishSafetyError:
return
# It was an export, instead of a publish.
if not publisher:
return
ui_utils.announce_publish_success(
publisher,
message="Version {publisher.version} of \"{publisher.name}\" has"
" been published. Remember to version up!"
)
self.close()
def __before_reload__():
# We have to manually clean this, since we aren't totally sure it will
# always fall out of scope.
global dialog
if dialog:
dialog.close()
dialog.destroy()
dialog = None
dialog = None
def run():
global dialog
if dialog:
dialog.close()
# Be cautious if the scene was never saved
filename = cmds.file(query=True, sceneName=True)
if not filename:
        res = QtGui.QMessageBox.warning(None, 'Unsaved Scene', 'This scene has not been saved. Continue anyway?',
QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
QtGui.QMessageBox.No
)
if res & QtGui.QMessageBox.No:
return
workspace = cmds.workspace(q=True, rootDirectory=True)
if filename and not filename.startswith(workspace):
        res = QtGui.QMessageBox.warning(None, 'Mismatched Workspace', 'This scene is not from the current workspace. Continue anyway?',
QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
QtGui.QMessageBox.No
)
if res & QtGui.QMessageBox.No:
return
dialog = Dialog()
dialog.show()
| bsd-3-clause | -3,621,045,263,396,236,000 | 28.643243 | 136 | 0.610321 | false | 4.18626 | false | false | false |
telamonian/saga-python | src/saga/adaptors/cpi/advert/entry.py | 10 | 1833 |
__author__ = "Andre Merzky"
__copyright__ = "Copyright 2012-2013, The SAGA Project"
__license__ = "MIT"
import saga.adaptors.cpi.decorators as cpi_dec
import saga.adaptors.cpi.namespace as cpi_ns
import saga.adaptors.cpi.attributes as cpi_att
SYNC = cpi_dec.CPI_SYNC_CALL
ASYNC = cpi_dec.CPI_ASYNC_CALL
# keep order of inheritance! super() below uses MRO
class Entry (cpi_ns.entry.Entry,
cpi_att.Attributes) :
# ----------------------------------------------------------------
#
# initialization methods
#
def __init__ (self, api, adaptor) :
self._cpi_nsentry = super (Entry, self)
self._cpi_nsentry.__init__ (api, adaptor)
@SYNC
def init_instance (self, url, flags, session) : pass
@ASYNC
def init_instance_async (self, url, flags, session) : pass
# ----------------------------------------------------------------
#
# advert methods
#
@SYNC
def set_ttl (self, ttl, ttype=None) : pass
@ASYNC
def set_ttl_async (self, ttl, ttype=None) : pass
@SYNC
def get_ttl (self, ttype) : pass
@ASYNC
def get_ttl_async (self, ttype) : pass
@SYNC
def store_object (self, object, ttype) : pass
@ASYNC
def store_object_async (self, object, ttype) : pass
@SYNC
def retrieve_object (self, ttype) : pass
@ASYNC
def retrieve_object_async (self, ttype) : pass
@SYNC
def delete_object (self, ttype) : pass
@ASYNC
def delete_object_async (self, ttype) : pass
| mit | -25,864,402,350,303,360 | 26.772727 | 75 | 0.469722 | false | 3.834728 | false | false | false |
praba230890/PYPOWER | pypower/case30.py | 3 | 7349 | # Copyright (c) 1996-2015 PSERC. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Power flow data for 30 bus, 6 generator case.
"""
from numpy import array
def case30():
"""Power flow data for 30 bus, 6 generator case.
Please see L{caseformat} for details on the case file format.
Based on data from ...
Alsac, O. & Stott, B., I{"Optimal Load Flow with Steady State Security"},
IEEE Transactions on Power Apparatus and Systems, Vol. PAS 93, No. 3,
1974, pp. 745-751.
... with branch parameters rounded to nearest 0.01, shunt values divided
by 100 and shunt on bus 10 moved to bus 5, load at bus 5 zeroed out.
Generator locations, costs and limits and bus areas were taken from ...
Ferrero, R.W., Shahidehpour, S.M., Ramesh, V.C., I{"Transaction analysis
in deregulated power systems using game theory"}, IEEE Transactions on
Power Systems, Vol. 12, No. 3, Aug 1997, pp. 1340-1347.
Generator Q limits were derived from Alsac & Stott, using their Pmax
capacities. V limits and line |S| limits taken from Alsac & Stott.
@return: Power flow data for 30 bus, 6 generator case.
@see: U{http://www.pserc.cornell.edu/matpower/}
"""
ppc = {"version": '2'}
##----- Power Flow Data -----##
## system MVA base
ppc["baseMVA"] = 100.0
## bus data
# bus_i type Pd Qd Gs Bs area Vm Va baseKV zone Vmax Vmin
ppc["bus"] = array([
[1, 3, 0, 0, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
[2, 2, 21.7, 12.7, 0, 0, 1, 1, 0, 135, 1, 1.1, 0.95],
[3, 1, 2.4, 1.2, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
[4, 1, 7.6, 1.6, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
[5, 1, 0, 0, 0, 0.19, 1, 1, 0, 135, 1, 1.05, 0.95],
[6, 1, 0, 0, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
[7, 1, 22.8, 10.9, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
[8, 1, 30, 30, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
[9, 1, 0, 0, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
[10, 1, 5.8, 2, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95],
[11, 1, 0, 0, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
[12, 1, 11.2, 7.5, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
[13, 2, 0, 0, 0, 0, 2, 1, 0, 135, 1, 1.1, 0.95],
[14, 1, 6.2, 1.6, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
[15, 1, 8.2, 2.5, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
[16, 1, 3.5, 1.8, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
[17, 1, 9, 5.8, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
[18, 1, 3.2, 0.9, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
[19, 1, 9.5, 3.4, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
[20, 1, 2.2, 0.7, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
[21, 1, 17.5, 11.2, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95],
[22, 2, 0, 0, 0, 0, 3, 1, 0, 135, 1, 1.1, 0.95],
[23, 2, 3.2, 1.6, 0, 0, 2, 1, 0, 135, 1, 1.1, 0.95],
[24, 1, 8.7, 6.7, 0, 0.04, 3, 1, 0, 135, 1, 1.05, 0.95],
[25, 1, 0, 0, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95],
[26, 1, 3.5, 2.3, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95],
[27, 2, 0, 0, 0, 0, 3, 1, 0, 135, 1, 1.1, 0.95],
[28, 1, 0, 0, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
[29, 1, 2.4, 0.9, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95],
[30, 1, 10.6, 1.9, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95]
])
## generator data
# bus, Pg, Qg, Qmax, Qmin, Vg, mBase, status, Pmax, Pmin, Pc1, Pc2,
# Qc1min, Qc1max, Qc2min, Qc2max, ramp_agc, ramp_10, ramp_30, ramp_q, apf
ppc["gen"] = array([
[1, 23.54, 0, 150, -20, 1, 100, 1, 80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2, 60.97, 0, 60, -20, 1, 100, 1, 80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[22, 21.59, 0, 62.5, -15, 1, 100, 1, 50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[27, 26.91, 0, 48.7, -15, 1, 100, 1, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[23, 19.2, 0, 40, -10, 1, 100, 1, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[13, 37, 0, 44.7, -15, 1, 100, 1, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
])
## branch data
# fbus, tbus, r, x, b, rateA, rateB, rateC, ratio, angle, status, angmin, angmax
ppc["branch"] = array([
[1, 2, 0.02, 0.06, 0.03, 130, 130, 130, 0, 0, 1, -360, 360],
[1, 3, 0.05, 0.19, 0.02, 130, 130, 130, 0, 0, 1, -360, 360],
[2, 4, 0.06, 0.17, 0.02, 65, 65, 65, 0, 0, 1, -360, 360],
[3, 4, 0.01, 0.04, 0, 130, 130, 130, 0, 0, 1, -360, 360],
[2, 5, 0.05, 0.2, 0.02, 130, 130, 130, 0, 0, 1, -360, 360],
[2, 6, 0.06, 0.18, 0.02, 65, 65, 65, 0, 0, 1, -360, 360],
[4, 6, 0.01, 0.04, 0, 90, 90, 90, 0, 0, 1, -360, 360],
[5, 7, 0.05, 0.12, 0.01, 70, 70, 70, 0, 0, 1, -360, 360],
[6, 7, 0.03, 0.08, 0.01, 130, 130, 130, 0, 0, 1, -360, 360],
[6, 8, 0.01, 0.04, 0, 32, 32, 32, 0, 0, 1, -360, 360],
[6, 9, 0, 0.21, 0, 65, 65, 65, 0, 0, 1, -360, 360],
[6, 10, 0, 0.56, 0, 32, 32, 32, 0, 0, 1, -360, 360],
[9, 11, 0, 0.21, 0, 65, 65, 65, 0, 0, 1, -360, 360],
[9, 10, 0, 0.11, 0, 65, 65, 65, 0, 0, 1, -360, 360],
[4, 12, 0, 0.26, 0, 65, 65, 65, 0, 0, 1, -360, 360],
[12, 13, 0, 0.14, 0, 65, 65, 65, 0, 0, 1, -360, 360],
[12, 14, 0.12, 0.26, 0, 32, 32, 32, 0, 0, 1, -360, 360],
[12, 15, 0.07, 0.13, 0, 32, 32, 32, 0, 0, 1, -360, 360],
[12, 16, 0.09, 0.2, 0, 32, 32, 32, 0, 0, 1, -360, 360],
[14, 15, 0.22, 0.2, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[16, 17, 0.08, 0.19, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[15, 18, 0.11, 0.22, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[18, 19, 0.06, 0.13, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[19, 20, 0.03, 0.07, 0, 32, 32, 32, 0, 0, 1, -360, 360],
[10, 20, 0.09, 0.21, 0, 32, 32, 32, 0, 0, 1, -360, 360],
[10, 17, 0.03, 0.08, 0, 32, 32, 32, 0, 0, 1, -360, 360],
[10, 21, 0.03, 0.07, 0, 32, 32, 32, 0, 0, 1, -360, 360],
[10, 22, 0.07, 0.15, 0, 32, 32, 32, 0, 0, 1, -360, 360],
[21, 22, 0.01, 0.02, 0, 32, 32, 32, 0, 0, 1, -360, 360],
[15, 23, 0.1, 0.2, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[22, 24, 0.12, 0.18, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[23, 24, 0.13, 0.27, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[24, 25, 0.19, 0.33, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[25, 26, 0.25, 0.38, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[25, 27, 0.11, 0.21, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[28, 27, 0, 0.4, 0, 65, 65, 65, 0, 0, 1, -360, 360],
[27, 29, 0.22, 0.42, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[27, 30, 0.32, 0.6, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[29, 30, 0.24, 0.45, 0, 16, 16, 16, 0, 0, 1, -360, 360],
[8, 28, 0.06, 0.2, 0.02, 32, 32, 32, 0, 0, 1, -360, 360],
[6, 28, 0.02, 0.06, 0.01, 32, 32, 32, 0, 0, 1, -360, 360]
])
##----- OPF Data -----##
## area data
# area refbus
ppc["areas"] = array([
[1, 8],
[2, 23],
[3, 26],
])
## generator cost data
# 1 startup shutdown n x1 y1 ... xn yn
# 2 startup shutdown n c(n-1) ... c0
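# (all rows below use model 2 with n = 3 polynomial coefficients, i.e. cost(Pg) = c2*Pg^2 + c1*Pg + c0,
#  so the first generator costs 0.02*Pg^2 + 2*Pg + 0)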
ppc["gencost"] = array([
[2, 0, 0, 3, 0.02, 2, 0],
[2, 0, 0, 3, 0.0175, 1.75, 0],
[2, 0, 0, 3, 0.0625, 1, 0],
[2, 0, 0, 3, 0.00834, 3.25, 0],
[2, 0, 0, 3, 0.025, 3, 0],
[2, 0, 0, 3, 0.025, 3, 0]
])
return ppc
| bsd-3-clause | -7,030,047,634,823,100,000 | 46.720779 | 85 | 0.443053 | false | 2.102718 | false | false | false |
jhamilius/chain | scripts/json2csv.py | 1 | 1104 | #!/usr/bin/env python
#Convert a JSON array file into a CSV file
#import
import ijson
import csv
import time
import sys
import os
import datetime
os.chdir('/home/julien_ha/data')
#Start timer
start_time = time.time()
input_file = sys.argv[1]
output_file = input_file.replace('.json','.csv')
#Parse json file
f1 = open(input_file)
d1 = []
for item in ijson.items(f1, "item"):
d1.append(item)
f = csv.writer(open(output_file, "wb+"))
# Write the CSV header; if you don't need it, remove this line
f.writerow(["blockHash","blockNumber","from","gas","gasPrice","hash","input","nonce","timestamp","to","transactionIndex","value"])
for t in d1:
f.writerow([t["blockHash"],
t["blockNumber"],
t["from"],
t["gas"],
t["gasPrice"],
t["hash"],
t["input"],
t["nonce"],
t["timestamp"],
t["to"],
t["transactionIndex"],
t["value"]
])
#Print time duration
print("Done in --- %s seconds ---" % (time.time() - start_time)) | mit | -1,198,574,196,205,953,800 | 22.020833 | 130 | 0.54529 | false | 3.538462 | false | false | false |
jnsebgosselin/WHAT | gwhat/meteo/weather_station_finder.py | 1 | 8241 | # -*- coding: utf-8 -*-
# Copyright © 2014-2018 GWHAT Project Contributors
# https://github.com/jnsebgosselin/gwhat
#
# This file is part of GWHAT (Ground-Water Hydrograph Analysis Toolbox).
# Licensed under the terms of the GNU General Public License.
# ---- Imports: standard libraries
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
import csv
import time
import os
import os.path as osp
# ---- Imports: third parties
import numpy as np
from PyQt5.QtCore import QObject
from PyQt5.QtCore import pyqtSignal as QSignal
# ---- Imports: local libraries
from gwhat.common.utils import calc_dist_from_coord
from gwhat.meteo.weather_stationlist import WeatherSationList
from gwhat import __rootdir__
DATABASE_FILEPATH = osp.join(__rootdir__, 'climate_station_database.npy')
MAX_FAILED_FETCH_TRY = 3
PROV_NAME_ABB = [('ALBERTA', 'AB'),
('BRITISH COLUMBIA', 'BC'),
('MANITOBA', 'MB'),
('NEW BRUNSWICK', 'NB'),
('NEWFOUNDLAND', 'NL'),
('NORTHWEST TERRITORIES', 'NT'),
('NOVA SCOTIA', 'NS'),
('NUNAVUT', 'NU'),
('ONTARIO', 'ON'),
('PRINCE EDWARD ISLAND', 'PE'),
('QUEBEC', 'QC'),
('SASKATCHEWAN', 'SK'),
('YUKON TERRITORY', 'YT')]
# ---- Base functions
URL_TOR = ("ftp://client_climate@ftp.tor.ec.gc.ca/" +
"Pub/Get_More_Data_Plus_de_donnees/Station%20Inventory%20EN.csv")
def read_stationlist_from_tor():
""""Read and format the `Station Inventory En.csv` file from Tor ftp."""
try:
data = urlopen(URL_TOR).read()
except (HTTPError, URLError):
return None
try:
data = data.decode('utf-8-sig').splitlines()
except (UnicodeDecodeError, UnicodeError):
return None
data = list(csv.reader(data, delimiter=','))
FIELDS_KEYS_TYPE = [('Name', 'Name', str),
('Province', 'Province', str),
('Climate ID', 'ID', str),
('Station ID', 'Station ID', str),
('DLY First Year', 'DLY First Year', int),
('DLY Last Year', 'DLY Last Year', int),
('Latitude (Decimal Degrees)', 'Latitude', float),
('Longitude (Decimal Degrees)', 'Longitude', float),
('Elevation (m)', 'Elevation', float)]
df = {}
columns = None
for i, row in enumerate(data):
if len(row) == 0:
continue
if row[0] == 'Name':
columns = row
data = np.array(data[i+1:])
# Remove stations with no daily data
            dly_first_year = data[:, columns.index('DLY First Year')]
            data = data[~(dly_first_year == ''), :]
break
else:
return None
for field, key, atype in FIELDS_KEYS_TYPE:
arr = data[:, columns.index(field)]
if atype == float:
arr[arr == ''] = np.nan
else:
arr[arr == ''] = 'NA'
df[key] = arr.astype(atype)
    # Sanitize station names (str.replace returns a new string, so assign the result back).
    for i in range(len(df['Name'])):
        df['Name'][i] = df['Name'][i].replace('\\', ' ').replace('/', ' ')
# Determine station status.
df['Status'] = np.zeros(len(df['Name'])).astype(str)
df['Status'][df['DLY Last Year'] >= 2017] = 'Active'
df['Status'][df['DLY Last Year'] < 2017] = 'Closed'
# Format province value.
for name, abb in PROV_NAME_ABB:
df['Province'][df['Province'] == name] = abb
return df
# ---- API
class WeatherStationFinder(QObject):
sig_progress_msg = QSignal(str)
sig_load_database_finished = QSignal(bool)
def __init__(self, filelist=None, *args, **kwargs):
super(WeatherStationFinder, self).__init__(*args, **kwargs)
self._data = None
# ---- Load and fetch database
@property
def data(self):
"""Content of the ECCC database."""
return self._data
def load_database(self):
"""
Load the climate station list from a file if it exist or else fetch it
from ECCC Tor ftp server.
"""
if os.path.exists(DATABASE_FILEPATH):
self.sig_progress_msg.emit(
"Loading the climate station database from file.")
ts = time.time()
self._data = np.load(DATABASE_FILEPATH).item()
te = time.time()
print("Station list loaded sucessfully in %0.2f sec." % (te-ts))
self.sig_load_database_finished.emit(True)
else:
self.fetch_database()
def fetch_database(self):
"""
Fetch and read the list of climate stations with daily data
from the ECCC Tor ftp server and save the result on disk.
"""
print("Fetching station list from ECCC Tor ftp server...")
ts = time.time()
self._data = None
failed_fetch_try = 0
while True:
self.sig_progress_msg.emit("Fetching the climate station database"
" from the ECCC server...")
self._data = read_stationlist_from_tor()
if self._data is None:
failed_fetch_try += 1
if failed_fetch_try <= MAX_FAILED_FETCH_TRY:
print("Failed to fetch the database from "
" the ECCC server (%d/%d)."
% (failed_fetch_try, MAX_FAILED_FETCH_TRY))
time.sleep(3)
else:
msg = "Failed to fetch the database from the ECCC server."
print(msg)
self.sig_progress_msg.emit(msg)
break
else:
np.save(DATABASE_FILEPATH, self._data)
te = time.time()
print("Station list fetched sucessfully in %0.2f sec."
% (te-ts))
break
self.sig_load_database_finished.emit(True)
# ---- Utility functions
def get_stationlist(self, status=None, prov=None, prox=None, yrange=None):
"""
Return a list of the stations in the ECCC database that
fulfill the conditions specified in arguments.
"""
N = len(self.data['Name'])
results = np.ones(N)
if prov:
results = results * np.isin(self.data['Province'], prov)
if status:
results = results * (self.data['Status'] == status)
if prox:
lat1, lon1, max_dist = prox
lat2, lon2 = self.data['Latitude'], self.data['Longitude']
dists = calc_dist_from_coord(lat1, lon1, lat2, lon2)
results = results * (dists <= max_dist)
if yrange:
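            # Keep only stations whose period of record overlaps [yrange[0], yrange[1]]
            # by at least yrange[2] years.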
arr_ymin = np.max(np.vstack([self.data['DLY First Year'],
np.ones(N)*yrange[0]]), axis=0)
arr_ymax = np.min(np.vstack([self.data['DLY Last Year'],
np.ones(N)*yrange[1]]), axis=0)
results = results * ((arr_ymax-arr_ymin+1) >= yrange[2])
indexes = np.where(results == 1)[0]
stations = np.vstack((self.data['Name'][indexes],
self.data['Station ID'][indexes],
self.data['DLY First Year'][indexes],
self.data['DLY Last Year'][indexes],
self.data['Province'][indexes],
self.data['ID'][indexes],
self.data['Latitude'][indexes],
self.data['Longitude'][indexes],
self.data['Elevation'][indexes],
)).transpose().tolist()
stationlist = WeatherSationList()
stationlist.add_stations(stations)
return stationlist
if __name__ == '__main__':
stn_browser = WeatherStationFinder()
stn_browser.load_database()
stnlist = stn_browser.get_stationlist(prov=['QC', 'ON'],
prox=(45.40, -73.15, 25),
yrange=(1960, 2015, 10))
| gpl-3.0 | -8,988,214,428,498,387,000 | 34.364807 | 78 | 0.51784 | false | 3.929423 | false | false | false |
shawncaojob/LC | PY/259_3sum_smaller.py | 1 | 1215 | # 259. 3Sum Smaller Add to List
# DescriptionHintsSubmissionsSolutions
# Total Accepted: 23955 Total Submissions: 58266 Difficulty: Medium Contributor: LeetCode
# Given an array of n integers nums and a target, find the number of index triplets i, j, k with 0 <= i < j < k < n that satisfy the condition nums[i] + nums[j] + nums[k] < target.
#
# For example, given nums = [-2, 0, 1, 3], and target = 2.
#
# Return 2. Because there are two triplets whose sums are less than 2:
#
# [-2, 0, 1]
# [-2, 0, 3]
# Follow up:
# Could you solve it in O(n2) runtime?
# 2017.05.23
# 3 pointers
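# Sort, then for each anchor i move the j/k pointers inward: O(n^2) time and O(1) extra
# space, which satisfies the follow-up.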
class Solution(object):
def threeSumSmaller(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
if len(nums) < 3: return 0
res = 0
nums.sort()
n = len(nums)
i = 0
while i < n - 2:
j, k = i + 1, n - 1
while j < k:
cur = nums[i] + nums[j] + nums[k]
if cur < target:
                    res += k - j # key point: for this (i, j), every x in (j, k] also yields a sum < target
j += 1
else:
k -= 1
i += 1
return res
| gpl-3.0 | 8,614,901,401,143,436,000 | 28.634146 | 180 | 0.499588 | false | 3.412921 | false | false | false |
mpasternak/django-flexible-reports | flexible_reports/admin/table.py | 1 | 3373 | # -*- encoding: utf-8 -*-
from django import forms
from django.contrib import admin
from django.utils.text import format_lazy
from django.utils.translation import ugettext_lazy as _
from flexible_reports.models.table import AllSortOptions, SortInGroup
from ..models import Column, ColumnOrder, Table
from .helpers import AverageTextarea, SmallerTextarea, SortableHiddenMixin
class ColumnForm(forms.ModelForm):
class Meta:
widgets = {
'label': SmallerTextarea,
'template': AverageTextarea,
'footer_template': SmallerTextarea,
'attrs': SmallerTextarea
}
class ColumnOrderForm(forms.ModelForm):
def __init__(self, parent, *args, **kw):
super(ColumnOrderForm, self).__init__(*args, **kw)
self.fields['column'].queryset = Column.objects.filter(parent=parent)
class ColumnOrderInline(SortableHiddenMixin, admin.TabularInline):
extra = 0
model = ColumnOrder
fields = ['column', 'desc', 'position']
def formfield_for_foreignkey(self, db_field, request=None, **kwargs):
field = super(ColumnOrderInline, self).formfield_for_foreignkey(db_field, request, **kwargs)
if db_field.name == 'column':
if request._parent_ is not None:
field.queryset = field.queryset.filter(
parent=request._parent_,
sortable=True)
else:
field.queryset = field.queryset.none()
return field
class ColumnInline(SortableHiddenMixin, admin.StackedInline):
extra = 0
model = Column
form = ColumnForm
fields = ['label',
'attr_name',
'template',
'attrs',
'sortable',
'exclude_from_export',
'strip_html_on_export',
'display_totals',
'footer_template',
'position']
class TableForm(forms.ModelForm):
class Meta:
fields = ['label',
'base_model',
'sort_option',
'group_prefix',
'attrs',
'empty_template',
]
widgets = {
'label': SmallerTextarea,
'empty_template': SmallerTextarea,
'attrs': SmallerTextarea
}
@admin.register(Table)
class TableAdmin(admin.ModelAdmin):
list_display = ['label',
'base_model',
'short_sort_option',
'columns']
inlines = [ColumnInline, ColumnOrderInline]
form = TableForm
def columns(self, obj):
return ", ".join([x.label for x in obj.column_set.all()])
columns.short_description = _("Columns")
def short_sort_option(self, obj):
if obj.sort_option == SortInGroup.id:
return format_lazy(
"{label}{group_name}{group_prefix})",
label=SortInGroup.label,
group_name=_(" (group name: "),
group_prefix=obj.group_prefix
)
return AllSortOptions[obj.sort_option].label
short_sort_option.short_description = _("Sort option")
short_sort_option.admin_order_field = "sort_option"
def get_form(self, request, obj=None, **kwargs):
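        # Stash the object being edited on the request so that
        # ColumnOrderInline.formfield_for_foreignkey can filter column choices by parent.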
request._parent_ = obj
return super(TableAdmin, self).get_form(request, obj, **kwargs)
| mit | 4,332,384,536,891,170,000 | 29.944954 | 100 | 0.576342 | false | 4.195274 | false | false | false |
blzq/infer | infer/lib/python/inferlib/capture/buck.py | 2 | 10991 | # Copyright (c) 2015 - present Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import logging
import os
import shutil
import subprocess
import tempfile
import traceback
import time
from inferlib import config, issues, utils, bucklib
from . import util
import re
MODULE_NAME = __name__
MODULE_DESCRIPTION = '''Run analysis of code built with a command like:
buck [options] [target]
Analysis examples:
infer -- buck build HelloWorld'''
LANG = ['clang', 'java']
KEEP_GOING_OPTION = "--keep-going"
def gen_instance(*args):
return BuckAnalyzer(*args)
def string_in_quotes(value):
return value.strip('\'')
def create_argparser(group_name=MODULE_NAME):
"""This defines the set of arguments that get added by this module to the
set of global args defined in the infer top-level module
Do not use this function directly, it should be invoked by the infer
top-level module"""
parser = argparse.ArgumentParser(add_help=False)
group = parser.add_argument_group(
'{grp} module'.format(grp=MODULE_NAME),
description=MODULE_DESCRIPTION,
)
group.add_argument('--use-flavors', action='store_true',
help='Run Infer analysis through the use of flavors. '
'Currently this is supported only for the cxx_* '
'targets of Buck - e.g. cxx_library, cxx_binary - '
'and not for Java. Note: this flag should be used '
'in combination with passing the #infer flavor '
'to the Buck target.')
group.add_argument('--xcode-developer-dir',
help='Specify the path to Xcode developer directory '
'(requires --use-flavors to work)')
group.add_argument('--blacklist-regex',
help='Specify the regex for files to skip during '
'the analysis (requires --use-flavors to work)')
group.add_argument('--Xbuck', action='append', default=[],
type=string_in_quotes,
help='Pass values as command-line arguments to '
'invocations of `buck build`.'
'NOTE: value should be wrapped in single quotes')
return parser
class BuckAnalyzer:
def __init__(self, args, cmd):
self.args = args
self.cmd = cmd
self.keep_going = KEEP_GOING_OPTION in self.args.Xbuck
util.log_java_version()
logging.info(util.run_cmd_ignore_fail(['buck', '--version']))
def capture(self):
try:
if self.args.use_flavors:
return self.capture_with_flavors()
else:
return self.capture_without_flavors()
except subprocess.CalledProcessError as exc:
if self.args.debug:
traceback.print_exc()
return exc.returncode
def create_cxx_buck_configuration_args(self):
# return a string that can be passed in input to buck
# and configures the paths to infer/clang/plugin/xcode
facebook_clang_plugins_root = config.FCP_DIRECTORY
clang_path = os.path.join(
facebook_clang_plugins_root,
'clang',
'install',
'bin',
'clang',
)
plugin_path = os.path.join(
facebook_clang_plugins_root,
'libtooling',
'build',
'FacebookClangPlugin.dylib',
)
args = [
'--config',
'*//infer.infer_bin={bin}'
.format(bin=config.BIN_DIRECTORY),
'--config',
'*//infer.clang_compiler={clang}'.format(clang=clang_path),
'--config',
'*//infer.clang_plugin={plugin}'.format(plugin=plugin_path),
'--config',
'*//cxx.pch_enabled=false',
] + self.args.Xbuck
if self.args.xcode_developer_dir is not None:
args.append('--config')
args.append('apple.xcode_developer_dir={devdir}'.format(
devdir=self.args.xcode_developer_dir))
if self.args.blacklist_regex:
args.append('--config')
args.append('*//infer.blacklist_regex={regex}'.format(
regex=self.args.blacklist_regex))
return args
def _get_analysis_result_paths(self):
# TODO(8610738): Make targets extraction smarter
buck_results_cmd = [
self.cmd[0],
'targets',
'--show-output'
] + self.cmd[2:] + self.create_cxx_buck_configuration_args()
buck_results_cmd = \
[x for x in buck_results_cmd if x != KEEP_GOING_OPTION]
proc = subprocess.Popen(buck_results_cmd, stdout=subprocess.PIPE)
(buck_output, _) = proc.communicate()
if proc.returncode != 0:
return None
# remove target name prefixes from each line and split them into a list
out = [x.split(None, 1)[1] for x in buck_output.strip().split('\n')]
return [os.path.dirname(x)
if os.path.isfile(x) else x
for x in out if os.path.exists(x)]
@staticmethod
def _merge_infer_dep_files(root_paths, merged_out_path):
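        # Collect the per-target infer deps files (when present) and merge them,
        # de-duplicated, into a single file.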
potential_dep_files = [os.path.join(p, config.INFER_BUCK_DEPS_FILENAME)
for p in root_paths]
dep_files = filter(os.path.exists, potential_dep_files)
utils.merge_and_dedup_files_into_path(dep_files, merged_out_path)
@staticmethod
def _merge_infer_report_files(root_paths, merged_out_path):
potential_report_files = [os.path.join(p, config.JSON_REPORT_FILENAME)
for p in root_paths]
report_files = filter(os.path.exists, potential_report_files)
all_results = issues.merge_reports_from_paths(report_files)
utils.dump_json_to_path(all_results, merged_out_path)
@staticmethod
def _find_deps_and_merge(merged_out_path):
"""This function is used to compute the infer-deps.txt file that
contains the location of the infer-out folders with the captured
files created by buck. This is needed when keep-going is passed
to buck and there are compilation failures, because in that case
buck doesn't create this file."""
infer_out_folders = []
start_time = time.time()
print('finding captured files in buck-out...')
for root, dirs, files in os.walk(config.BUCK_OUT_GEN):
regex = re.compile('.*infer-out.*')
folders = \
[os.path.join(root, d) for d in dirs if re.match(regex, d)]
for d in folders:
if d not in infer_out_folders:
infer_out_folders.append(d)
with open(merged_out_path, 'w') as fmerged_out_path:
for dir in infer_out_folders:
fmerged_out_path.write('\t' + '\t' + dir + '\n')
elapsed_time = time.time() - start_time
print('time elapsed in finding captured files in buck-out: % 6.2fs'
% elapsed_time)
def _move_buck_out(self):
""" If keep-going is passed, we may need to compute the infer-deps
file with the paths to the captured files. To make sure that
this is done in a consistent way, we need to start the analysis
with an empty buck-out folder."""
if not os.path.exists(config.BUCK_OUT_TRASH):
os.makedirs(config.BUCK_OUT_TRASH)
tmp = tempfile.mkdtemp(
dir=config.BUCK_OUT_TRASH,
prefix=config.BUCK_OUT)
print('moving files in ' + config.BUCK_OUT + ' to ' + tmp)
for filename in os.listdir(config.BUCK_OUT):
if filename != config.TRASH:
shutil.move(os.path.join(config.BUCK_OUT, filename), tmp)
def _run_buck_with_flavors(self):
# TODO: Use buck to identify the project's root folder
if not os.path.isfile('.buckconfig'):
print('Please run this command from the folder where .buckconfig '
'is located')
return os.EX_USAGE
env_vars = utils.read_env()
infer_args = env_vars['INFER_ARGS']
if infer_args != '':
infer_args += '^' # '^' must be CommandLineOption.env_var_sep
infer_args += '--fcp-syntax-only'
env_vars['INFER_ARGS'] = infer_args
env = utils.encode_env(env_vars)
command = self.cmd
command += ['-j', str(self.args.multicore)]
if self.args.load_average is not None:
command += ['-L', str(self.args.load_average)]
command += self.create_cxx_buck_configuration_args()
try:
subprocess.check_call(command, env=env)
return os.EX_OK
except subprocess.CalledProcessError as e:
if self.keep_going:
print('Buck failed, but continuing the analysis '
'because --keep-going was passed')
return -1
else:
raise e
def capture_with_flavors(self):
if self.keep_going:
self._move_buck_out()
ret = self._run_buck_with_flavors()
if not ret == os.EX_OK and not self.keep_going:
return ret
result_paths = self._get_analysis_result_paths()
if result_paths is None:
# huho, the Buck command to extract results paths failed
return os.EX_SOFTWARE
merged_reports_path = os.path.join(
self.args.infer_out, config.JSON_REPORT_FILENAME)
merged_deps_path = os.path.join(
self.args.infer_out, config.INFER_BUCK_DEPS_FILENAME)
self._merge_infer_report_files(result_paths, merged_reports_path)
if not ret == os.EX_OK and self.keep_going:
self._find_deps_and_merge(merged_deps_path)
else:
self._merge_infer_dep_files(result_paths, merged_deps_path)
infer_out = self.args.infer_out
json_report = os.path.join(infer_out, config.JSON_REPORT_FILENAME)
bugs_out = os.path.join(infer_out, config.BUGS_FILENAME)
issues.print_and_save_errors(infer_out, self.args.project_root,
json_report, bugs_out, self.args.pmd_xml,
console_out=not self.args.quiet)
return os.EX_OK
def capture_without_flavors(self):
# Java is a special case, and we run the analysis from here
buck_wrapper = bucklib.Wrapper(self.args, self.cmd)
return buck_wrapper.run()
| bsd-3-clause | -5,515,190,734,831,969,000 | 40.011194 | 79 | 0.587026 | false | 3.975045 | true | false | false |
Snuggle/hypixel.py | leveling.py | 1 | 1255 | # This is an external library for calculating levels.
# It's basically a copy of https://github.com/Plancke/hypixel-php/blob/master/src/util/Leveling.php
# But written in Python.
from math import sqrt, floor
EXP_FIELD = 0
LVL_FIELD = 0
BASE = 10000
GROWTH = 2500
HALF_GROWTH = 0.5 * GROWTH
REVERSE_PQ_PREFIX = -(BASE - 0.5 * GROWTH)/GROWTH
REVERSE_CONST = REVERSE_PQ_PREFIX * REVERSE_PQ_PREFIX
GROWTH_DIVIDES_2 = 2/GROWTH
def getLevel(exp):
return floor(1+REVERSE_PQ_PREFIX + sqrt(REVERSE_CONST+GROWTH_DIVIDES_2*exp))
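# Worked example using the constants above: getLevel(10000) == 2, since the first
# 10,000 network experience points complete level 1 (getTotalExpToFullLevel(2) == 10000).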
def getExactLevel(exp):
return getLevel(exp) + getPercentageToNextLevel(exp)
def getExpFromLevelToNext(level):
return GROWTH * (level-1) + BASE
def getTotalExpToLevel(level):
lv = floor(level)
x0 = getTotalExpToFullLevel(lv)
if level == lv:
return x0
else:
return (getTotalExpToFullLevel(lv+1) - x0) * (level % 1) + x0
def getTotalExpToFullLevel(level):
return (HALF_GROWTH * (level-2) + BASE) * (level-1)
def getPercentageToNextLevel(exp):
lv = getLevel(exp)
x0 = getTotalExpToLevel(lv)
return (exp-x0) / (getTotalExpToLevel(lv+1) - x0)
def getExperience(EXP_FIELD, LVL_FIELD):
exp = int(EXP_FIELD)
exp += getTotalExpToFullLevel(LVL_FIELD+1)
return exp | mit | 8,737,754,293,849,364,000 | 25.723404 | 99 | 0.698805 | false | 2.740175 | false | false | false |
McLive/DjangoPowerDNS | dpdns/api_permissions.py | 1 | 1283 | # --
# DjangoPowerDNS - A PowerDNS web interface
# Copyright (C) 2017 McLive
# --
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU AFFERO General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# --
from rest_framework import permissions
from dpdns.models import APIKey
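# Usage sketch (assumed view code, not part of this module): add
# permission_classes = (HasAPIAccess,) to a DRF view and send the key in an
# "Api-Key" request header, which Django exposes as HTTP_API_KEY in request.META.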
class HasAPIAccess(permissions.BasePermission):
message = 'Invalid or missing API Key.'
def has_permission(self, request, view):
api_key = request.META.get('HTTP_API_KEY', '')
return APIKey.objects.filter(key=api_key).exists()
def has_object_permission(self, request, view, obj):
api_key = request.META.get('HTTP_API_KEY', '')
key = APIKey.objects.get(key=api_key)
domain = key.domain
return domain == obj | agpl-3.0 | -7,618,735,445,866,765,000 | 35.685714 | 77 | 0.717849 | false | 3.911585 | false | false | false |
kustodian/ansible | lib/ansible/modules/network/f5/bigip_profile_http.py | 23 | 61362 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_profile_http
short_description: Manage HTTP profiles on a BIG-IP
description:
- Manage HTTP profiles on a BIG-IP.
version_added: 2.7
options:
name:
description:
- Specifies the name of the profile.
type: str
required: True
parent:
description:
- Specifies the profile from which this profile inherits settings.
- When creating a new profile, if this parameter is not specified, the default
is the system-supplied C(http) profile.
type: str
default: /Common/http
description:
description:
- Description of the profile.
type: str
proxy_type:
description:
- Specifies the proxy mode for the profile.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: str
choices:
- reverse
- transparent
- explicit
dns_resolver:
description:
      - Specifies the name of a configured DNS resolver; this option is mandatory when C(proxy_type)
is set to C(explicit).
      - The format of the name can either be prepended by partition (C(/Common/foo)), or specified
just as an object name (C(foo)).
- To remove the entry a value of C(none) or C('') can be set, however the profile C(proxy_type)
must not be set as C(explicit).
type: str
insert_xforwarded_for:
description:
      - When specified, the system inserts an X-Forwarded-For header in an HTTP request
with the client IP address, to use with connection pooling.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: bool
redirect_rewrite:
description:
- Specifies whether the system rewrites the URIs that are part of HTTP
redirect (3XX) responses.
- When set to C(none) the system will not rewrite the URI in any
HTTP redirect responses.
- When set to C(all) the system rewrites the URI in all HTTP redirect responses.
- When set to C(matching) the system rewrites the URI in any
HTTP redirect responses that match the request URI.
- When set to C(nodes) if the URI contains a node IP address instead of a host name,
the system changes it to the virtual server address.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: str
choices:
- none
- all
- matching
- nodes
encrypt_cookies:
description:
- Cookie names for the system to encrypt.
- To remove the entry completely a value of C(none) or C('') should be set.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: list
encrypt_cookie_secret:
description:
- Passphrase for cookie encryption.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: str
update_password:
description:
- C(always) will update passwords if the C(encrypt_cookie_secret) is specified.
- C(on_create) will only set the password for newly created profiles.
type: str
choices:
- always
- on_create
default: always
header_erase:
description:
      - The name of a header, in an HTTP request, which the system removes from the request.
- To remove the entry completely a value of C(none) or C('') should be set.
- The format of the header must be in C(KEY:VALUE) format, otherwise error is raised.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: str
version_added: 2.8
header_insert:
description:
- A string that the system inserts as a header in an HTTP request.
- To remove the entry completely a value of C(none) or C('') should be set.
- The format of the header must be in C(KEY:VALUE) format, otherwise error is raised.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: str
version_added: 2.8
server_agent_name:
description:
- Specifies the string used as the server name in traffic generated by BIG-IP.
- To remove the entry completely a value of C(none) or C('') should be set.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: str
version_added: 2.8
include_subdomains:
description:
- When set to C(yes), applies the HSTS policy to the HSTS host and its sub-domains.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: bool
version_added: 2.8
maximum_age:
description:
- Specifies the maximum length of time, in seconds, that HSTS functionality
requests that clients only use HTTPS to connect to the current host and
any sub-domains of the current host's domain name.
- The accepted value range is C(0 - 4294967295) seconds, a value of C(0) seconds
re-enables plaintext HTTP access, while specifying C(indefinite) will set it to the maximum value.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: str
version_added: 2.8
hsts_mode:
description:
- When set to C(yes), enables the HSTS settings.
- When creating a new profile, if this parameter is not specified, the default is provided by the parent profile.
type: bool
version_added: 2.8
accept_xff:
description:
- Enables or disables trusting the client IP address, and statistics from the client IP address,
based on the request's XFF (X-forwarded-for) headers, if they exist.
- When creating a new profile, if this parameter is not specified, the default is provided by the parent profile.
type: bool
version_added: 2.9
xff_alternative_names:
description:
- Specifies alternative XFF headers instead of the default X-forwarded-for header.
- When creating a new profile, if this parameter is not specified, the default is provided by the parent profile.
type: list
version_added: 2.9
fallback_host:
description:
- Specifies an HTTP fallback host.
- When creating a new profile, if this parameter is not specified, the default is provided by the parent profile.
type: str
version_added: 2.9
fallback_status_codes:
description:
- Specifies one or more HTTP error codes from server responses that should trigger
a redirection to the fallback host.
- The accepted valid error codes are as defined by rfc2616.
- The codes can be specified as individual items or as valid ranges e.g. C(400-417) or C(500-505).
- Mixing response code range across error types is invalid e.g. defining C(400-505) will raise an error.
- When creating a new profile, if this parameter is not specified, the default is provided by the parent profile.
type: list
version_added: 2.9
oneconnect_transformations:
description:
- Enables the system to perform HTTP header transformations for the purpose of keeping server-side
connections open. This feature requires configuration of a OneConnect profile.
- When creating a new profile, if this parameter is not specified, the default is provided by the parent profile.
type: bool
version_added: 2.9
request_chunking:
description:
- Specifies how to handle chunked and unchunked requests.
- When creating a new profile, if this parameter is not specified, the default is provided by the parent profile.
type: str
choices:
- rechunk
- selective
- preserve
version_added: 2.9
response_chunking:
description:
- Specifies how to handle chunked and unchunked responses.
- When creating a new profile, if this parameter is not specified, the default is provided by the parent profile.
type: str
choices:
- rechunk
- selective
- preserve
version_added: 2.9
enforcement:
description:
- Specifies protocol enforcement settings for the HTTP profile.
- When creating a new profile, if this parameter is not specified, the default is provided by the parent profile.
suboptions:
truncated_redirects:
description:
- Specifies what happens if a truncated redirect is seen from a server.
- If C(yes), the redirect will be forwarded to the client, otherwise the malformed HTTP
will be silently ignored.
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: bool
excess_client_headers:
description:
- Specifies the behavior when too many client headers are received.
          - If set to C(pass-through), the system will switch to pass-through mode; when set to C(reject), the connection will be rejected.
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: str
choices:
- reject
- pass-through
excess_server_headers:
description:
- Specifies the behavior when too many server headers are received.
          - If set to C(pass-through), the system will switch to pass-through mode; when set to C(reject), the connection will be rejected.
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: str
choices:
- reject
- pass-through
oversize_client_headers:
description:
- Specifies the behavior when too-large client headers are received.
          - If set to C(pass-through), the system will switch to pass-through mode; when set to C(reject), the connection will be rejected.
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: str
choices:
- reject
- pass-through
oversize_server_headers:
description:
- Specifies the behavior when too-large server headers are received.
          - If set to C(pass-through), the system will switch to pass-through mode; when set to C(reject), the connection will be rejected.
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: str
choices:
- reject
- pass-through
pipeline:
description:
- Enables HTTP/1.1 pipelining, allowing clients to make requests even when prior requests have not received
a response.
- In order for this to succeed, however, destination servers must include support for pipelining.
- If set to C(pass-through), pipelined data will cause the BIG-IP to immediately switch to pass-through mode
and disable the HTTP filter.
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: str
choices:
- allow
- reject
- pass-through
unknown_method:
description:
- Specifies whether to allow, reject or switch to pass-through mode when an unknown HTTP method is parsed.
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: str
choices:
- allow
- reject
- pass-through
max_header_count:
description:
- Specifies the maximum number of headers allowed in HTTP request/response.
- The valid value range is between 16 and 4096 inclusive.
- When set to C(default) the value of this parameter will be C(64)
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: str
max_header_size:
description:
- Specifies the maximum header size specified in bytes.
- The valid value range is between 0 and 4294967295 inclusive.
- When set to C(default) the value of this parameter will be C(32768) bytes
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: str
max_requests:
description:
- Specifies the number of requests that the system accepts on a per-connection basis.
- The valid value range is between 0 and 4294967295 inclusive.
- When set to C(default) the value of this parameter will be C(0), which means the system
will not limit the number of requests per connection.
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: str
known_methods:
description:
          - Specifies which HTTP methods count as being known; removing RFC-defined methods from this list
will cause the HTTP filter to not recognize them.
- "The default list provided with the system include: C(CONNECT), C(DELETE), C(GET),
C(HEAD), C(LOCK), C(OPTIONS), C(POST), C(PROPFIND), C(PUT), C(TRACE) ,C(UNLOCK). The list can be appended by
by specifying C(default) keyword as one of the list elements."
- The C(default) keyword can also be used to restore the default C(known_methods) on the system.
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: list
type: dict
version_added: 2.9
sflow:
description:
- Specifies sFlow settings for the HTTP profile.
- When creating a new profile, if this parameter is not specified, the default is provided by the parent profile.
suboptions:
poll_interval:
description:
- Specifies the maximum interval in seconds between two pollings.
- The valid value range is between 0 and 4294967295 seconds inclusive.
- For this setting to take effect the C(poll_interval_global) parameter must be set to C(no).
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: int
poll_interval_global:
description:
          - Specifies whether the global HTTP poll-interval setting overrides the object-level C(poll_interval) setting.
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: bool
sampling_rate:
description:
- Specifies the ratio of packets observed to the samples generated. For example, a sampling rate of C(2000)
specifies that 1 sample will be randomly generated for every 2000 packets observed.
- The valid value range is between 0 and 4294967295 packets inclusive.
- For this setting to take effect the C(sampling_rate_global) parameter must be set to C(no).
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: int
sampling_rate_global:
description:
- Specifies whether the global HTTP sampling-rate setting overrides the object-level sampling-rate setting.
- When creating a new profile, if this parameter is not specified, the default is provided
by the parent profile.
type: bool
type: dict
version_added: 2.9
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
state:
description:
- When C(present), ensures that the profile exists.
- When C(absent), ensures the profile is removed.
type: str
choices:
- present
- absent
default: present
extends_documentation_fragment: f5
author:
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create HTTP profile
bigip_profile_http:
name: my_profile
insert_xforwarded_for: yes
redirect_rewrite: all
state: present
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
- name: Remove HTTP profile
bigip_profile_http:
name: my_profile
state: absent
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Add HTTP profile for transparent proxy
bigip_profile_http:
name: my_profile
proxy_type: transparent
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
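# Illustrative sketch (values below are assumptions, not module defaults): combining
# the documented HSTS and protocol enforcement options.
- name: Add HTTP profile with HSTS and protocol enforcement
  bigip_profile_http:
    name: my_profile
    hsts_mode: yes
    maximum_age: 31536000
    include_subdomains: yes
    enforcement:
      max_header_count: 1024
      unknown_method: reject
    provider:
      server: lb.mydomain.com
      user: admin
      password: secret
  delegate_to: localhost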
'''
RETURN = r'''
parent:
description: Specifies the profile from which this profile inherits settings.
returned: changed
type: str
sample: /Common/http
description:
description: Description of the profile.
returned: changed
type: str
sample: My profile
proxy_type:
description: Specify proxy mode of the profile.
returned: changed
type: str
sample: explicit
hsts_mode:
description: Enables the HSTS settings.
returned: changed
type: bool
sample: no
maximum_age:
description: The maximum length of time, in seconds, that HSTS functionality requests that clients only use HTTPS.
returned: changed
type: str
sample: indefinite
include_subdomains:
description: Applies the HSTS policy to the HSTS host and its sub-domains.
returned: changed
type: bool
sample: yes
server_agent_name:
description: The string used as the server name in traffic generated by BIG-IP.
returned: changed
type: str
sample: foobar
header_erase:
  description: The name of a header, in an HTTP request, which the system removes from the request.
returned: changed
type: str
sample: FOO:BAR
header_insert:
description: The string that the system inserts as a header in an HTTP request.
returned: changed
type: str
sample: FOO:BAR
insert_xforwarded_for:
description: Insert X-Forwarded-For-Header.
returned: changed
type: bool
sample: yes
redirect_rewrite:
  description: Rewrite URIs that are part of 3xx responses.
returned: changed
type: str
sample: all
encrypt_cookies:
description: Cookie names to encrypt.
returned: changed
type: list
sample: ['MyCookie1', 'MyCookie2']
dns_resolver:
description: Configured dns resolver.
returned: changed
type: str
sample: '/Common/FooBar'
accept_xff:
description: Enables or disables trusting the client IP address, and statistics from the client IP address.
returned: changed
type: bool
sample: yes
xff_alternative_names:
description: Specifies alternative XFF headers instead of the default X-forwarded-for header.
returned: changed
type: list
sample: ['FooBar', 'client1']
fallback_host:
description: Specifies an HTTP fallback host.
returned: changed
type: str
sample: 'foobar.com'
fallback_status_codes:
description: HTTP error codes from server responses that should trigger a redirection to the fallback host.
returned: changed
type: list
sample: ['400-404', '500', '501']
oneconnect_transformations:
description: Enables or disables HTTP header transformations.
returned: changed
type: bool
sample: no
request_chunking:
description: Specifies how to handle chunked and unchunked requests.
returned: changed
type: str
sample: rechunk
response_chunking:
description: Specifies how to handle chunked and unchunked responses.
returned: changed
type: str
sample: rechunk
enforcement:
description: Specifies protocol enforcement settings for the HTTP profile.
type: complex
returned: changed
contains:
truncated_redirects:
description: Specifies what happens if a truncated redirect is seen from a server.
returned: changed
type: bool
sample: yes
excess_server_headers:
description: Specifies the behavior when too many server headers are received.
returned: changed
type: str
sample: pass-through
oversize_client_headers:
description: Specifies the behavior when too-large client headers are received.
returned: changed
type: str
sample: reject
oversize_server_headers:
description: Specifies the behavior when too-large server headers are received.
returned: changed
type: str
sample: reject
pipeline:
description: Allows, rejects or switches to pass-through mode when dealing with pipelined data.
returned: changed
type: str
sample: allow
unknown_method:
description: Allows, rejects or switches to pass-through mode when an unknown HTTP method is parsed.
returned: changed
type: str
sample: allow
max_header_count:
description: The maximum number of headers allowed in HTTP request/response.
returned: changed
type: str
sample: 4096
max_header_size:
description: The maximum header size specified in bytes.
returned: changed
type: str
sample: default
max_requests:
description: The number of requests that the system accepts on a per-connection basis.
returned: changed
type: str
sample: default
known_methods:
description: The list of known HTTP methods.
returned: changed
type: list
sample: ['default', 'FOO', 'BAR']
sample: hash/dictionary of values
sflow:
description: Specifies sFlow settings for the HTTP profile.
type: complex
returned: changed
contains:
poll_interval:
description: Specifies the maximum interval in seconds between two pollings.
returned: changed
type: int
sample: 30
poll_interval_global:
description: Enables/Disables overriding HTTP poll-interval setting.
returned: changed
type: bool
sample: yes
sampling_rate:
description: Specifies the ratio of packets observed to the samples generated.
returned: changed
type: int
sample: 2000
sampling_rate_global:
description: Enables/Disables overriding HTTP sampling-rate setting.
returned: changed
type: bool
sample: yes
sample: hash/dictionary of values
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import flatten_boolean
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.compare import cmp_simple_list
from library.module_utils.network.f5.urls import check_header_validity
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import flatten_boolean
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.compare import cmp_simple_list
from ansible.module_utils.network.f5.urls import check_header_validity
class Parameters(AnsibleF5Parameters):
api_map = {
'defaultsFrom': 'parent',
'insertXforwardedFor': 'insert_xforwarded_for',
'redirectRewrite': 'redirect_rewrite',
'encryptCookies': 'encrypt_cookies',
'encryptCookieSecret': 'encrypt_cookie_secret',
'proxyType': 'proxy_type',
'explicitProxy': 'explicit_proxy',
'headerErase': 'header_erase',
'headerInsert': 'header_insert',
'serverAgentName': 'server_agent_name',
'includeSubdomains': 'include_subdomains',
'maximumAge': 'maximum_age',
'mode': 'hsts_mode',
'acceptXff': 'accept_xff',
'xffAlternativeNames': 'xff_alternative_names',
'fallbackHost': 'fallback_host',
'fallbackStatusCodes': 'fallback_status_codes',
'oneconnectTransformations': 'oneconnect_transformations',
'requestChunking': 'request_chunking',
'responseChunking': 'response_chunking',
}
api_attributes = [
'insertXforwardedFor',
'description',
'defaultsFrom',
'redirectRewrite',
'encryptCookies',
'encryptCookieSecret',
'proxyType',
'explicitProxy',
'headerErase',
'headerInsert',
'hsts',
'serverAgentName',
'acceptXff',
'xffAlternativeNames',
'fallbackHost',
'fallbackStatusCodes',
'oneconnectTransformations',
'requestChunking',
'responseChunking',
'enforcement',
'sflow',
]
returnables = [
'parent',
'description',
'insert_xforwarded_for',
'redirect_rewrite',
'encrypt_cookies',
'proxy_type',
'explicit_proxy',
'dns_resolver',
'hsts_mode',
'maximum_age',
'include_subdomains',
'server_agent_name',
'header_erase',
'header_insert',
'accept_xff',
'xff_alternative_names',
'fallback_host',
'fallback_status_codes',
'oneconnect_transformations',
'request_chunking',
'response_chunking',
'truncated_redirects',
'excess_client_headers',
'excess_server_headers',
'oversize_client_headers',
'oversize_server_headers',
'pipeline',
'unknown_method',
'max_header_count',
'max_header_size',
'max_requests',
'known_methods',
'poll_interval',
'poll_interval_global',
'sampling_rate',
'sampling_rate_global',
]
updatables = [
'description',
'insert_xforwarded_for',
'redirect_rewrite',
'encrypt_cookies',
'encrypt_cookie_secret',
'proxy_type',
'dns_resolver',
'hsts_mode',
'maximum_age',
'include_subdomains',
'server_agent_name',
'header_erase',
'header_insert',
'accept_xff',
'xff_alternative_names',
'fallback_host',
'fallback_status_codes',
'oneconnect_transformations',
'request_chunking',
'response_chunking',
'truncated_redirects',
'excess_client_headers',
'excess_server_headers',
'oversize_client_headers',
'oversize_server_headers',
'pipeline',
'unknown_method',
'max_header_count',
'max_header_size',
'max_requests',
'known_methods',
'poll_interval',
'poll_interval_global',
'sampling_rate',
'sampling_rate_global',
]
class ApiParameters(Parameters):
@property
def poll_interval(self):
return self._values['sflow']['pollInterval']
@property
def poll_interval_global(self):
return self._values['sflow']['pollIntervalGlobal']
@property
def sampling_rate(self):
return self._values['sflow']['samplingRate']
@property
def sampling_rate_global(self):
return self._values['sflow']['samplingRateGlobal']
@property
def truncated_redirects(self):
return self._values['enforcement']['truncatedRedirects']
@property
def excess_client_headers(self):
return self._values['enforcement']['excessClientHeaders']
@property
def excess_server_headers(self):
return self._values['enforcement']['excessServerHeaders']
@property
def oversize_client_headers(self):
return self._values['enforcement']['oversizeClientHeaders']
@property
def oversize_server_headers(self):
return self._values['enforcement']['oversizeServerHeaders']
@property
def pipeline(self):
return self._values['enforcement']['pipeline']
@property
def unknown_method(self):
return self._values['enforcement']['unknownMethod']
@property
def max_header_count(self):
return self._values['enforcement']['maxHeaderCount']
@property
def max_header_size(self):
return self._values['enforcement']['maxHeaderSize']
@property
def max_requests(self):
return self._values['enforcement']['maxRequests']
@property
def known_methods(self):
return self._values['enforcement'].get('knownMethods', None)
@property
def dns_resolver(self):
if self._values['explicit_proxy'] is None:
return None
if 'dnsResolver' in self._values['explicit_proxy']:
return self._values['explicit_proxy']['dnsResolver']
@property
def dns_resolver_address(self):
if self._values['explicit_proxy'] is None:
return None
if 'dnsResolverReference' in self._values['explicit_proxy']:
return self._values['explicit_proxy']['dnsResolverReference']
@property
def include_subdomains(self):
if self._values['hsts'] is None:
return None
return self._values['hsts']['includeSubdomains']
@property
def hsts_mode(self):
if self._values['hsts'] is None:
return None
return self._values['hsts']['mode']
@property
def maximum_age(self):
if self._values['hsts'] is None:
return None
return self._values['hsts']['maximumAge']
class ModuleParameters(Parameters):
@property
def accept_xff(self):
result = flatten_boolean(self._values['accept_xff'])
if result is None:
return None
if result == 'yes':
return 'enabled'
return 'disabled'
@property
def fallback_status_codes(self):
if self._values['fallback_status_codes'] is None:
return None
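        # Reject a single entry that mixes 4xx and 5xx codes (e.g. '400-505'),
        # which the option documentation above declares invalid.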
p1 = r'(?!([4][0-1][0-7]))\d{3}'
p2 = r'(?!(50[0-5]))\d{3}'
for code in self._values['fallback_status_codes']:
match_4xx = re.search(p1, code)
if match_4xx:
match_5xx = re.search(p2, code)
if match_5xx:
raise F5ModuleError(
'Invalid HTTP error code or error code range specified.'
)
return self._values['fallback_status_codes']
@property
def oneconnect_transformations(self):
result = flatten_boolean(self._values['oneconnect_transformations'])
if result is None:
return None
if result == 'yes':
return 'enabled'
return 'disabled'
@property
def proxy_type(self):
if self._values['proxy_type'] is None:
return None
if self._values['proxy_type'] == 'explicit':
if self.dns_resolver is None or self.dns_resolver == '':
raise F5ModuleError(
'A proxy type cannot be set to {0} without providing DNS resolver.'.format(self._values['proxy_type'])
)
return self._values['proxy_type']
@property
def dns_resolver(self):
if self._values['dns_resolver'] is None:
return None
if self._values['dns_resolver'] == '' or self._values['dns_resolver'] == 'none':
return ''
result = fq_name(self.partition, self._values['dns_resolver'])
return result
@property
def dns_resolver_address(self):
resolver = self.dns_resolver
if resolver is None:
return None
tmp = resolver.split('/')
link = dict(link='https://localhost/mgmt/tm/net/dns-resolver/~{0}~{1}'.format(tmp[1], tmp[2]))
return link
@property
def insert_xforwarded_for(self):
result = flatten_boolean(self._values['insert_xforwarded_for'])
if result is None:
return None
if result == 'yes':
return 'enabled'
return 'disabled'
@property
def parent(self):
if self._values['parent'] is None:
return None
result = fq_name(self.partition, self._values['parent'])
return result
@property
def encrypt_cookies(self):
if self._values['encrypt_cookies'] is None:
return None
if self._values['encrypt_cookies'] == [''] or self._values['encrypt_cookies'] == ['none']:
return list()
return self._values['encrypt_cookies']
@property
def explicit_proxy(self):
if self.dns_resolver is None:
return None
result = dict(
dnsResolver=self.dns_resolver,
dnsResolverReference=self.dns_resolver_address
)
return result
@property
def include_subdomains(self):
result = flatten_boolean(self._values['include_subdomains'])
if result is None:
return None
if result == 'yes':
return 'enabled'
return 'disabled'
@property
def maximum_age(self):
if self._values['maximum_age'] is None:
return None
if self._values['maximum_age'] == 'indefinite':
return 4294967295
if 0 <= int(self._values['maximum_age']) <= 4294967295:
return int(self._values['maximum_age'])
raise F5ModuleError(
"Valid 'maximum_age' must be in range 0 - 4294967295, or 'indefinite'."
)
@property
def hsts_mode(self):
result = flatten_boolean(self._values['hsts_mode'])
if result is None:
return None
if result == 'yes':
return 'enabled'
return 'disabled'
@property
def header_erase(self):
header_erase = self._values['header_erase']
if header_erase is None:
return None
if header_erase in ['none', '']:
return self._values['header_erase']
check_header_validity(header_erase)
return header_erase
@property
def header_insert(self):
header_insert = self._values['header_insert']
if header_insert is None:
return None
if header_insert in ['none', '']:
return self._values['header_insert']
check_header_validity(header_insert)
return header_insert
@property
def excess_client_headers(self):
if self._values['enforcement'] is None:
return None
return self._values['enforcement']['excess_client_headers']
@property
def excess_server_headers(self):
if self._values['enforcement'] is None:
return None
return self._values['enforcement']['excess_server_headers']
@property
def oversize_client_headers(self):
if self._values['enforcement'] is None:
return None
return self._values['enforcement']['oversize_client_headers']
@property
def oversize_server_headers(self):
if self._values['enforcement'] is None:
return None
return self._values['enforcement']['oversize_server_headers']
@property
def pipeline(self):
if self._values['enforcement'] is None:
return None
return self._values['enforcement']['pipeline']
@property
def unknown_method(self):
if self._values['enforcement'] is None:
return None
return self._values['enforcement']['unknown_method']
@property
def truncated_redirects(self):
if self._values['enforcement'] is None:
return None
result = flatten_boolean(self._values['enforcement']['truncated_redirects'])
if result is None:
return None
if result == 'yes':
return 'enabled'
return 'disabled'
@property
def max_header_count(self):
if self._values['enforcement'] is None:
return None
if self._values['enforcement']['max_header_count'] is None:
return None
if self._values['enforcement']['max_header_count'] == 'default':
return 64
if 16 <= int(self._values['enforcement']['max_header_count']) <= 4096:
return int(self._values['enforcement']['max_header_count'])
raise F5ModuleError(
"Valid 'max_header_count' must be in range 16 - 4096, or 'default'."
)
@property
def max_header_size(self):
if self._values['enforcement'] is None:
return None
if self._values['enforcement']['max_header_size'] is None:
return None
if self._values['enforcement']['max_header_size'] == 'default':
return 32768
if 0 <= int(self._values['enforcement']['max_header_size']) <= 4294967295:
return int(self._values['enforcement']['max_header_size'])
raise F5ModuleError(
"Valid 'max_header_size' must be in range 0 - 4294967295, or 'default'."
)
@property
def max_requests(self):
if self._values['enforcement'] is None:
return None
if self._values['enforcement']['max_requests'] is None:
return None
if self._values['enforcement']['max_requests'] == 'default':
return 0
if 0 <= int(self._values['enforcement']['max_requests']) <= 4294967295:
return int(self._values['enforcement']['max_requests'])
raise F5ModuleError(
"Valid 'max_requests' must be in range 0 - 4294967295, or 'default'."
)
@property
def known_methods(self):
if self._values['enforcement'] is None:
return None
defaults = ['CONNECT', 'DELETE', 'GET', 'HEAD', 'LOCK', 'OPTIONS', 'POST', 'PROPFIND', 'PUT', 'TRACE', 'UNLOCK']
known = self._values['enforcement']['known_methods']
if known is None:
return None
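        # A lone 'default' restores the RFC method list and a lone '' clears it;
        # otherwise 'default' in the list expands to the RFC methods plus the user-supplied ones.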
if len(known) == 1:
if known[0] == 'default':
return defaults
if known[0] == '':
return []
if 'default' in known:
to_return = [method for method in known if method != 'default']
to_return.extend(defaults)
return to_return
result = [method for method in known]
return result
@property
def poll_interval(self):
if self._values['sflow'] is None:
return None
if self._values['sflow']['poll_interval'] is None:
return None
if 0 <= self._values['sflow']['poll_interval'] <= 4294967295:
return self._values['sflow']['poll_interval']
raise F5ModuleError(
"Valid 'poll_interval' must be in range 0 - 4294967295 seconds."
)
@property
def sampling_rate(self):
if self._values['sflow'] is None:
return None
if self._values['sflow']['sampling_rate'] is None:
return None
if 0 <= self._values['sflow']['sampling_rate'] <= 4294967295:
return self._values['sflow']['sampling_rate']
raise F5ModuleError(
"Valid 'sampling_rate' must be in range 0 - 4294967295 packets."
)
@property
def poll_interval_global(self):
if self._values['sflow'] is None:
return None
result = flatten_boolean(self._values['sflow']['poll_interval_global'])
return result
@property
def sampling_rate_global(self):
if self._values['sflow'] is None:
return None
result = flatten_boolean(self._values['sflow']['sampling_rate_global'])
return result
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
@property
def explicit_proxy(self):
result = dict()
if self._values['dns_resolver'] is not None:
result['dnsResolver'] = self._values['dns_resolver']
if self._values['dns_resolver_address'] is not None:
result['dnsResolverReference'] = self._values['dns_resolver_address']
if not result:
return None
return result
@property
def hsts(self):
result = dict()
if self._values['hsts_mode'] is not None:
result['mode'] = self._values['hsts_mode']
if self._values['maximum_age'] is not None:
result['maximumAge'] = self._values['maximum_age']
if self._values['include_subdomains'] is not None:
result['includeSubdomains'] = self._values['include_subdomains']
if not result:
return None
return result
@property
def enforcement(self):
to_filter = dict(
excessClientHeaders=self._values['excess_client_headers'],
excessServerHeaders=self._values['excess_server_headers'],
knownMethods=self._values['known_methods'],
maxHeaderCount=self._values['max_header_count'],
maxHeaderSize=self._values['max_header_size'],
maxRequests=self._values['max_requests'],
oversizeClientHeaders=self._values['oversize_client_headers'],
oversizeServerHeaders=self._values['oversize_server_headers'],
pipeline=self._values['pipeline'],
truncatedRedirects=self._values['truncated_redirects'],
unknownMethod=self._values['unknown_method']
)
result = self._filter_params(to_filter)
if result:
return result
@property
def sflow(self):
to_filter = dict(
pollInterval=self._values['poll_interval'],
pollIntervalGlobal=self._values['poll_interval_global'],
samplingRate=self._values['sampling_rate'],
samplingRateGlobal=self._values['sampling_rate_global'],
)
result = self._filter_params(to_filter)
if result:
return result
class ReportableChanges(Changes):
returnables = [
'parent',
'description',
'insert_xforwarded_for',
'redirect_rewrite',
'encrypt_cookies',
'proxy_type',
'explicit_proxy',
'dns_resolver',
'hsts_mode',
'maximum_age',
'include_subdomains',
'server_agent_name',
'header_erase',
'header_insert',
'accept_xff',
'xff_alternative_names',
'fallback_host',
'fallback_status_codes',
'oneconnect_transformations',
'request_chunking',
'response_chunking',
'enforcement',
'sflow'
]
@property
def insert_xforwarded_for(self):
if self._values['insert_xforwarded_for'] is None:
return None
elif self._values['insert_xforwarded_for'] == 'enabled':
return 'yes'
return 'no'
@property
def hsts_mode(self):
if self._values['hsts_mode'] is None:
return None
elif self._values['hsts_mode'] == 'enabled':
return 'yes'
return 'no'
@property
def include_subdomains(self):
if self._values['include_subdomains'] is None:
return None
elif self._values['include_subdomains'] == 'enabled':
return 'yes'
return 'no'
@property
def maximum_age(self):
if self._values['maximum_age'] is None:
return None
if self._values['maximum_age'] == 4294967295:
return 'indefinite'
return int(self._values['maximum_age'])
@property
def truncated_redirects(self):
result = flatten_boolean(self._values['truncated_redirects'])
return result
@property
def max_header_count(self):
if self._values['max_header_count'] is None:
return None
if self._values['max_header_count'] == 64:
return 'default'
return str(self._values['max_header_count'])
@property
def max_header_size(self):
if self._values['max_header_size'] is None:
return None
if self._values['max_header_size'] == 32768:
return 'default'
return str(self._values['max_header_size'])
@property
def max_requests(self):
if self._values['max_requests'] is None:
return None
if self._values['max_requests'] == 0:
return 'default'
return str(self._values['max_requests'])
@property
def known_methods(self):
defaults = ['CONNECT', 'DELETE', 'GET', 'HEAD', 'LOCK', 'OPTIONS', 'POST', 'PROPFIND', 'PUT', 'TRACE', 'UNLOCK']
known = self._values['known_methods']
if known is None:
return None
if not known:
return ['']
if set(known) == set(defaults):
return ['default']
if set(known).issuperset(set(defaults)):
result = [item for item in known if item not in defaults]
result.append('default')
return result
return known
@property
def enforcement(self):
to_filter = dict(
excess_client_headers=self._values['excess_client_headers'],
excess_server_headers=self._values['excess_server_headers'],
known_methods=self.known_methods,
max_header_count=self.max_header_count,
max_header_size=self.max_header_size,
max_requests=self.max_requests,
oversize_client_headers=self._values['oversize_client_headers'],
oversize_server_headers=self._values['oversize_server_headers'],
pipeline=self._values['pipeline'],
truncated_redirects=self.truncated_redirects,
unknown_method=self._values['unknown_method']
)
result = self._filter_params(to_filter)
if result:
return result
@property
def accept_xff(self):
result = flatten_boolean(self._values['accept_xff'])
return result
@property
def oneconnect_transformations(self):
result = flatten_boolean(self._values['oneconnect_transformations'])
return result
@property
def sflow(self):
to_filter = dict(
poll_interval=self._values['poll_interval'],
poll_interval_global=self._values['poll_interval_global'],
sampling_rate=self._values['sampling_rate'],
sampling_rate_global=self._values['sampling_rate_global'],
)
result = self._filter_params(to_filter)
if result:
return result
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def parent(self):
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent http profile cannot be changed"
)
@property
def dns_resolver(self):
if self.want.dns_resolver is None:
return None
if self.want.dns_resolver == '':
if self.have.dns_resolver is None or self.have.dns_resolver == 'none':
return None
elif self.have.proxy_type == 'explicit' and self.want.proxy_type is None:
raise F5ModuleError(
"DNS resolver cannot be empty or 'none' if an existing profile proxy type is set to {0}.".format(self.have.proxy_type)
)
elif self.have.dns_resolver is not None:
return self.want.dns_resolver
if self.have.dns_resolver is None:
return self.want.dns_resolver
@property
def header_erase(self):
if self.want.header_erase is None:
return None
if self.want.header_erase in ['none', '']:
if self.have.header_erase in [None, 'none']:
return None
if self.want.header_erase != self.have.header_erase:
return self.want.header_erase
@property
def header_insert(self):
if self.want.header_insert is None:
return None
if self.want.header_insert in ['none', '']:
if self.have.header_insert in [None, 'none']:
return None
if self.want.header_insert != self.have.header_insert:
return self.want.header_insert
@property
def server_agent_name(self):
if self.want.server_agent_name is None:
return None
if self.want.server_agent_name in ['none', '']:
if self.have.server_agent_name in [None, 'none']:
return None
if self.want.server_agent_name != self.have.server_agent_name:
return self.want.server_agent_name
@property
def encrypt_cookies(self):
if self.want.encrypt_cookies is None:
return None
if self.have.encrypt_cookies in [None, []]:
if not self.want.encrypt_cookies:
return None
else:
return self.want.encrypt_cookies
if set(self.want.encrypt_cookies) != set(self.have.encrypt_cookies):
return self.want.encrypt_cookies
@property
def encrypt_cookie_secret(self):
if self.want.encrypt_cookie_secret != self.have.encrypt_cookie_secret:
if self.want.update_password == 'always':
result = self.want.encrypt_cookie_secret
return result
@property
def xff_alternative_names(self):
result = cmp_simple_list(self.want.xff_alternative_names, self.have.xff_alternative_names)
return result
@property
def fallback_status_codes(self):
result = cmp_simple_list(self.want.fallback_status_codes, self.have.fallback_status_codes)
return result
@property
def known_methods(self):
result = cmp_simple_list(self.want.known_methods, self.have.known_methods)
return result
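# Illustrative sketch (not part of the module); the variable names are
# placeholders. ModuleManager feeds every updatable parameter through
# compare(), which dispatches to a dedicated property above when one exists
# and otherwise falls back to the generic want-vs-have check in __default.
#
#   diff = Difference(want=ModuleParameters(params=desired),
#                     have=ApiParameters(params=current))
#   change = diff.compare('header_insert')   # None means "no change needed"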
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
if self.exists():
return self.remove()
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403, 404]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return response['selfLink']
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 404]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
self.chunk = ['rechunk', 'selective', 'preserve']
self.choices = ['pass-through', 'reject']
self.select = ['allow', 'pass-through', 'reject']
argument_spec = dict(
name=dict(required=True),
parent=dict(default='/Common/http'),
description=dict(),
accept_xff=dict(type='bool'),
xff_alternative_names=dict(type='list'),
fallback_host=dict(),
fallback_status_codes=dict(type='list'),
oneconnect_transformations=dict(type='bool'),
request_chunking=dict(choices=self.chunk),
response_chunking=dict(choices=self.chunk),
proxy_type=dict(
choices=[
'reverse',
'transparent',
'explicit'
]
),
dns_resolver=dict(),
insert_xforwarded_for=dict(type='bool'),
redirect_rewrite=dict(
choices=[
'none',
'all',
'matching',
'nodes'
]
),
encrypt_cookies=dict(type='list'),
encrypt_cookie_secret=dict(no_log=True),
update_password=dict(
default='always',
choices=['always', 'on_create']
),
header_erase=dict(),
header_insert=dict(),
server_agent_name=dict(),
hsts_mode=dict(type='bool'),
maximum_age=dict(),
include_subdomains=dict(type='bool'),
enforcement=dict(
type='dict',
options=dict(
truncated_redirects=dict(type='bool'),
excess_client_headers=dict(choices=self.choices),
excess_server_headers=dict(choices=self.choices),
oversize_client_headers=dict(choices=self.choices),
oversize_server_headers=dict(choices=self.choices),
pipeline=dict(choices=self.select),
unknown_method=dict(choices=self.select),
max_header_count=dict(),
max_header_size=dict(),
max_requests=dict(),
known_methods=dict(type='list'),
)
),
sflow=dict(
type='dict',
options=dict(
poll_interval=dict(type='int'),
poll_interval_global=dict(type='bool'),
sampling_rate=dict(type='int'),
sampling_rate_global=dict(type='bool'),
)
),
state=dict(
default='present',
choices=['present', 'absent']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| gpl-3.0 | -2,387,226,522,644,654,000 | 33.924303 | 138 | 0.611176 | false | 4.352224 | false | false | false |
guoruibiao/crosswall | crowall.py | 1 | 1462 | # coding:utf-8
import sys
reload(sys)
sys.setdefaultencoding('utf8')
# __author__ = '郭 璞'
# __date__ = '2016/10/24'
# __Desc__ = GFW-bypass hosts helper. By default it first backs up the current hosts file to guard against accidents, and it can be used cross-platform.
import platform
import os
import urllib2
def downloadHosts(url):
file = open('./hosts.txt', 'wb')
data = urllib2.urlopen(url).readlines()
file.writelines(data)
file.close()
def crosswall(systemtype='Windows'):
try:
if systemtype == 'Windows':
os.system('copy %SystemRoot%\System32\drivers\etc\hosts %SystemRoot%\System32\drivers\etc\hosts_bak')
os.system('copy hosts.txt %SystemRoot%\System32\drivers\etc\hosts')
os.system('ipconfig /flushdns')
os.system('pause')
print 'It\'s done on Windows! And Try your browser!'
elif systemtype == "Linux":
os.system('cp /etc/hosts /etc/hosts_bak')
os.system('mv ./hosts.txt /etc/hosts')
os.system('sudo /etc/init.d/networking restart ')
print 'It\'s done on Linux! And Try your browser!'
except Exception as e:
print e
if __name__ == '__main__':
url = 'https://raw.githubusercontent.com/racaljk/hosts/master/hosts'
downloadHosts(url=url)
print 'Hosts update success!'
crosswall(platform.system())
print 'Hosts replaced success! Try to cross the wall!'
| mit | -272,186,543,264,640,670 | 28.531915 | 114 | 0.617435 | false | 3.140271 | false | false | false |
ashishbaghudana/relna | relna/preprocessing/edges.py | 1 | 3319 | import abc
from nala.structures.data import Edge
from nltk.stem import PorterStemmer
class EdgeGenerator:
"""
Abstract class for generating edges between two entities. Each edge represents
a possible relationship between the two entities
Subclasses that inherit this class should:
* Be named [Name]EdgeGenerator
* Implement the abstract method generate
* Append new items to the list field "edges" of each Part in the dataset
"""
@abc.abstractmethod
def generate(self, dataset):
"""
:type dataset: nala.structures.data.Dataset
"""
return
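# Minimal sketch (an assumption, not part of relna) of a concrete subclass:
# implement generate() and append Edge objects to part.edges, as the class
# docstring above requires.
#
#   class SameSentenceEdgeGenerator(EdgeGenerator):
#       def generate(self, dataset):
#           for part in dataset.parts():
#               part.edges = []
#               # pair up annotations here and append Edge(...) instances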
class SimpleEdgeGenerator(EdgeGenerator):
"""
Simple implementation of generating edges between the two entities
if they are contained in the same sentence.
Implements the abstract class EdgeGenerator.
:type entity1_class: str
:type entity2_class: str
:type relation_type: str
"""
def __init__(self, entity1_class, entity2_class, relation_type):
self.entity1_class = entity1_class
self.entity2_class = entity2_class
self.relation_type = relation_type
def generate(self, dataset):
from itertools import product, chain
for part in dataset.parts():
part.edges = []
for ann_1, ann_2 in product(
(ann for ann in chain(part.annotations, part.predicted_annotations) if ann.class_id == self.entity1_class),
(ann for ann in chain(part.annotations, part.predicted_annotations) if ann.class_id == self.entity2_class)):
index_1 = part.get_sentence_index_for_annotation(ann_1)
index_2 = part.get_sentence_index_for_annotation(ann_2)
if index_1 == index_2 and index_1 != None:
part.edges.append(
Edge(ann_1, ann_2, self.relation_type,
part.sentences[index_1], index_1, part))
class WordFilterEdgeGenerator(EdgeGenerator):
"""
Generates edges between the two entities if they are contained in the same
sentence and that sentence contains at least one of the given filter words.
Implements the abstract class EdgeGenerator.
:type entity1_class: str
:type entity2_class: str
:type relation_type: str
:type words: iterable of str
"""
def __init__(self, entity1_class, entity2_class, relation_type, words):
self.entity1_class = entity1_class
self.entity2_class = entity2_class
self.relation_type = relation_type
self.words = words
def generate(self, dataset):
from itertools import product
for part in dataset.parts():
for ann_1, ann_2 in product(
(ann for ann in part.annotations if ann.class_id == self.entity1_class),
(ann for ann in part.annotations if ann.class_id == self.entity2_class)):
index_1 = part.get_sentence_index_for_annotation(ann_1)
index_2 = part.get_sentence_index_for_annotation(ann_2)
if index_1 == index_2 and index_1 != None:
for token in part.sentences[index_1]:
if token.word in self.words:
part.edges.append(
Edge(ann_1, ann_2, self.relation_type,
part.sentences[index_1], index_1, part))
| mit | 1,887,039,554,789,854,200 | 38.047059 | 128 | 0.614944 | false | 4.260591 | false | false | false |
webmasterraj/gitrecommender | old_code/mysql.py | 1 | 2335 | import MySQLdb
import os
import sys
import urlparse
def get_db_variables():
urlparse.uses_netloc.append("mysql")
try:
# Check to make sure DATABASES is set in settings.py file.
# If not default to {}
if 'DATABASES' not in locals():
DATABASES = {}
if 'CLEARDB_DATABASE_URL' in os.environ:
url = urlparse.urlparse(os.environ['CLEARDB_DATABASE_URL'])
# Ensure default database exists.
DATABASES['default'] = DATABASES.get('default', {})
# Update with environment configuration.
DATABASES['default'].update({
'NAME': url.path[1:],
'USER': url.username,
'PASSWORD': url.password,
'HOST': url.hostname,
'PORT': url.port,
})
if url.scheme == 'mysql':
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
except Exception:
print 'Unexpected error:', sys.exc_info()
return DATABASES
def str_for_mysql(s):
if isinstance(s, basestring):
s = s.replace("'", "''")
# Add any more string formatting steps here
return s
def date_for_mysql(d):
d = d.strftime("%Y-%m-%d %H:%M")
# Add any more date formatting steps here
return d
class DB(object):
conn = None
def connect(self):
db_params = get_db_variables()
self.conn = MySQLdb.connect(db_params['default']['HOST'],
db_params['default']['USER'],
db_params['default']['PASSWORD'],
db_params['default']['NAME'],
charset='utf8')
print "DB >> Opened connection to database."
def query(self, sql):
try:
cursor = self.conn.cursor()
cursor.execute(sql)
except (AttributeError, MySQLdb.OperationalError):
self.connect()
cursor = self.conn.cursor()
cursor.execute(sql)
return cursor
def commit(self):
try:
self.conn.commit()
except (AttributeError, MySQLdb.OperationalError):
self.connect()
self.conn.commit()
def close(self):
try:
self.conn.close()
print "DB >> Closed connection to database."
except (AttributeError, MySQLdb.OperationalError):
pass
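# Illustrative usage sketch (not part of this module); the SQL and table name
# are made up for the example.
#
#   db = DB()
#   cursor = db.query("SELECT id, name FROM repos LIMIT 10")
#   rows = cursor.fetchall()
#   db.commit()
#   db.close()
#
# query() (re)connects lazily on the first call or after the connection has
# been dropped, so no explicit connect() call is needed.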
| mit | 9,130,741,594,736,428,000 | 25.534091 | 75 | 0.554176 | false | 4.284404 | false | false | false |
duplicati/usage-reporter | dbmodel.py | 1 | 1392 | from google.appengine.ext import db
class ReportSet(db.Model):
setid = db.StringProperty(required=False)
uid = db.StringProperty(required=False)
ostype = db.StringProperty(required=False)
osversion = db.StringProperty(required=False)
clrversion = db.StringProperty(required=False)
appname = db.StringProperty(required=False)
appversion = db.StringProperty(required=False)
assembly = db.StringProperty(required=False)
class ReportItem(db.Model):
reportset = db.ReferenceProperty(required=True)
timestamp = db.IntegerProperty(required=False)
eventtype = db.StringProperty(required=False)
count = db.IntegerProperty(required=False)
name = db.StringProperty(required=False)
data = db.TextProperty(required=False)
counted = db.IntegerProperty(required=False)
class AggregateItem(db.Model):
# day, month, year or week
rangetype = db.StringProperty(required=True)
# eg. 2016-01-01
# or 2016-01
# or 2016
# or 2016-w1
rangekey = db.StringProperty(required=True)
timestamp = db.IntegerProperty(required=True)
value_sum = db.IntegerProperty(required=True)
entry_count = db.IntegerProperty(required=True)
ostype = db.StringProperty(required=True)
name = db.StringProperty(required=True)
value = db.StringProperty(required=False)
lastupdated = db.IntegerProperty(required=False)
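# Illustrative sketch (not part of this module) of writing these models with
# the GAE datastore API; all field values below are examples only.
#
#   rs = ReportSet(setid='abc123', ostype='Linux', appname='Duplicati')
#   rs.put()
#   item = ReportItem(reportset=rs, eventtype='BACKUP', count=1, counted=0)
#   item.put()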
| mit | 7,266,576,911,102,321,000 | 32.142857 | 52 | 0.73204 | false | 3.824176 | false | false | false |
mapillary/OpenSfM | opensfm/actions/export_bundler.py | 1 | 4106 | import os
import numpy as np
from opensfm import io
from opensfm.dataset import DataSet
def run_dataset(data: DataSet, list_path, bundle_path, undistorted):
"""Export reconstruction to bundler format.
Args:
list_path: txt list of images to export
bundle_path : output path
undistorted : export undistorted reconstruction
"""
udata = data.undistorted_dataset()
default_path = os.path.join(data.data_path, "bundler")
list_file_path = list_path if list_path else default_path
bundle_file_path = bundle_path if bundle_path else default_path
if undistorted:
reconstructions = udata.load_undistorted_reconstruction()
track_manager = udata.load_undistorted_tracks_manager()
images = reconstructions[0].shots.keys()
else:
reconstructions = data.load_reconstruction()
track_manager = data.load_tracks_manager()
images = data.images()
export_bundler(
images, reconstructions, track_manager, bundle_file_path, list_file_path
)
def export_bundler(
image_list, reconstructions, track_manager, bundle_file_path, list_file_path
):
"""
Generate a reconstruction file that is consistent with Bundler's format
"""
io.mkdir_p(bundle_file_path)
io.mkdir_p(list_file_path)
for j, reconstruction in enumerate(reconstructions):
lines = []
lines.append("# Bundle file v0.3")
points = reconstruction.points
shots = reconstruction.shots
num_point = len(points)
num_shot = len(image_list)
lines.append(" ".join(map(str, [num_shot, num_point])))
shots_order = {key: i for i, key in enumerate(image_list)}
# cameras
for shot_id in image_list:
if shot_id in shots:
shot = shots[shot_id]
camera = shot.camera
if shot.camera.projection_type == "brown":
# Will approximate the Brown model, not optimal
focal_normalized = camera.focal_x
else:
focal_normalized = camera.focal
scale = max(camera.width, camera.height)
focal = focal_normalized * scale
k1 = camera.k1
k2 = camera.k2
R = shot.pose.get_rotation_matrix()
t = np.array(shot.pose.translation)
R[1], R[2] = -R[1], -R[2] # Reverse y and z
t[1], t[2] = -t[1], -t[2]
lines.append(" ".join(map(str, [focal, k1, k2])))
for i in range(3):
lines.append(" ".join(map(str, R[i])))
t = " ".join(map(str, t))
lines.append(t)
else:
for _ in range(5):
lines.append("0 0 0")
# tracks
for point in points.values():
coord = point.coordinates
color = list(map(int, point.color))
view_list = track_manager.get_track_observations(point.id)
lines.append(" ".join(map(str, coord)))
lines.append(" ".join(map(str, color)))
view_line = []
for shot_key, obs in view_list.items():
if shot_key in shots.keys():
v = obs.point
shot_index = shots_order[shot_key]
camera = shots[shot_key].camera
scale = max(camera.width, camera.height)
x = v[0] * scale
y = -v[1] * scale
view_line.append(" ".join(map(str, [shot_index, obs.id, x, y])))
lines.append(str(len(view_line)) + " " + " ".join(view_line))
bundle_file = os.path.join(
bundle_file_path, "bundle_r" + str(j).zfill(3) + ".out"
)
with io.open_wt(bundle_file) as fout:
fout.writelines("\n".join(lines) + "\n")
list_file = os.path.join(list_file_path, "list_r" + str(j).zfill(3) + ".out")
with io.open_wt(list_file) as fout:
fout.writelines("\n".join(map(str, image_list)))
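# Illustrative call sketch (not part of OpenSfM); the arguments are
# assumptions and would normally be prepared by run_dataset() above.
#
#   export_bundler(image_list=data.images(),
#                  reconstructions=data.load_reconstruction(),
#                  track_manager=data.load_tracks_manager(),
#                  bundle_file_path='bundler',
#                  list_file_path='bundler')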
| bsd-2-clause | -2,234,257,134,891,496,000 | 35.336283 | 85 | 0.544325 | false | 3.662801 | false | false | false |
przemyslawjanpietrzak/pyMonet | testers/applicative_law_tester.py | 1 | 1646 | from pymonet.utils import identity
class ApplicativeLawTester:
def __init__(self, applicative, value, mapper1, mapper2, get_fn=identity):
self.applicative = applicative
self.value = value
self.mapper1 = mapper1
self.mapper2 = mapper2
self.get_fn = get_fn
def _assert(self, x, y):
assert self.get_fn(x) == self.get_fn(y)
def identity_test(self):
x = self.applicative(identity).ap(self.applicative(self.value))
y = self.applicative(self.value)
self._assert(x, y)
def composition_test(self):
def lambda_fn(fn1):
return lambda fn2: lambda value: fn1(fn2(value))
x = self.applicative(lambda_fn)\
.ap(self.applicative(self.mapper1))\
.ap(self.applicative(self.mapper2))\
.ap(self.applicative(self.value))
y = self.applicative(self.mapper1).ap(
self.applicative(self.mapper2).ap(self.applicative(self.value))
)
self._assert(x, y)
def homomorphism_test(self):
x = self.applicative(self.mapper1).ap(self.applicative(self.value))
y = self.applicative(
self.mapper1(self.value)
)
self._assert(x, y)
def interchange_test(self):
x = self.applicative(self.mapper1).ap(self.applicative(self.value))
y = self.applicative(lambda fn: fn(self.value)).ap(
self.applicative(self.mapper1)
)
self._assert(x, y)
def test(self):
self.identity_test()
self.composition_test()
self.homomorphism_test()
self.interchange_test()
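# Illustrative usage sketch (an assumption, not part of the test suite): any
# applicative with a one-argument constructor and an .ap method can be tested,
# e.g. a hypothetical Box type.
#
#   ApplicativeLawTester(
#       applicative=Box,
#       value=42,
#       mapper1=lambda x: x + 1,
#       mapper2=lambda x: x * 2,
#   ).test()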
| mit | 2,922,775,157,642,970,000 | 29.481481 | 78 | 0.59356 | false | 3.472574 | true | false | false |
bretttegart/treadmill | integration_tests/test_cell_cli.py | 1 | 5535 | """
Integration test for EC2 cell setup.
"""
import ast
import unittest
import importlib
import click
import click.testing
from botocore.exceptions import ClientError
import time
from treadmill.infra import vpc
class CellCLITest(unittest.TestCase):
"""Tests EC2 cell setup."""
def setUp(self):
self.vpc_name = 'IntegrationTest-' + str(time.time())
self.runner = click.testing.CliRunner()
self.configure_cli = importlib.import_module(
'treadmill.cli.cloud'
).init()
def tearDown(self):
if not self.destroy_attempted:
self.runner.invoke(
self.configure_cli, [
'--domain=treadmill.org',
'delete',
'vpc',
'--vpc-name=' + self.vpc_name,
],
obj={}
)
def test_setup_cell(self):
self.destroy_attempted = False
result_init = self.runner.invoke(
self.configure_cli,
[
'--domain=treadmill.org',
'init',
'vpc',
'--name=' + self.vpc_name
],
obj={}
)
cell_info = {}
vpc_info = {}
try:
vpc_info = ast.literal_eval(result_init.output)
except Exception as e:
if result_init.exception:
print(result_init.exception)
else:
print(e)
self.vpc_id = vpc_info['VpcId']
self.assertIsNotNone(vpc_info['VpcId'])
self.assertEqual(vpc_info['Name'], self.vpc_name)
self.assertEqual(vpc_info['Subnets'], [])
result_cell_init = self.runner.invoke(
self.configure_cli, [
'--domain=treadmill.org',
'init',
'cell',
'--tm-release=0.1.0',
'--key=ms_treadmill_dev',
'--image=RHEL-7.4',
'--vpc-name=' + self.vpc_name,
'--cell-cidr-block=172.23.0.0/24',
'--ipa-admin-password=Tre@dmill1',
'--cell-subnet-name=TreadmillCell',
'--ldap-subnet-name=TreadmillLDAP',
],
obj={}
)
result = {}
try:
result = ast.literal_eval(result_cell_init.output)
except Exception as e:
if result_cell_init.exception:
print(result_cell_init.exception)
else:
print(e)
cell_info = result['Cell']
ldap_info = result['Ldap']
_vpc = vpc.VPC(id=vpc_info['VpcId'])
_vpc_info = _vpc.show()
self.assertEqual(cell_info['VpcId'], vpc_info['VpcId'])
self.assertEqual(cell_info['VpcId'], ldap_info['VpcId'])
self.assertEqual(len(cell_info['Instances']), 6)
self.assertEqual(len(ldap_info['Instances']), 1)
self.assertCountEqual(
[i['Name'] for i in cell_info['Instances']],
['TreadmillMaster1', 'TreadmillMaster2', 'TreadmillMaster3',
'TreadmillZookeeper1', 'TreadmillZookeeper2',
'TreadmillZookeeper3']
)
zk_subnet_ids = set([
i['SubnetId'] for i in cell_info['Instances'] if i['Name'][:-1]
in 'TreadmillZookeeper'
])
master_subnet_ids = set([
i['SubnetId'] for i in cell_info['Instances'] if i['Name'][:-1]
in 'TreadmillMaster'
])
ldap_subnet_ids = set([
i['SubnetId'] for i in ldap_info['Instances'] if i['Name'][:-1]
in 'TreadmillLDAP'
])
self.assertEqual(len(zk_subnet_ids), 1)
self.assertEqual(len(ldap_subnet_ids), 1)
self.assertEqual(len(master_subnet_ids), 1)
self.assertEqual(master_subnet_ids, zk_subnet_ids)
self.assertNotEqual(master_subnet_ids, ldap_subnet_ids)
self.assertEqual(len(_vpc_info['Subnets']), 2)
self.assertCountEqual(_vpc_info['Subnets'],
[list(zk_subnet_ids)[0],
list(ldap_subnet_ids)[0]])
self.runner.invoke(
self.configure_cli, [
'--domain=treadmill.org',
'delete',
'cell',
'--vpc-name=' + self.vpc_name,
'--subnet-name=TreadmillCell',
],
obj={}
)
self.runner.invoke(
self.configure_cli, [
'--domain=treadmill.org',
'delete',
'ldap',
'--vpc-name=' + self.vpc_name,
'--subnet-name=TreadmillLDAP',
],
obj={}
)
_vpc.instances = None
_vpc.subnet_ids = []
_vpc_info = _vpc.show()
self.assertEqual(len(_vpc_info['Instances']), 0)
self.assertEqual(len(_vpc_info['Subnets']), 0)
self.runner.invoke(
self.configure_cli, [
'--domain=treadmill.org',
'delete',
'vpc',
'--vpc-name=' + self.vpc_name
],
obj={}
)
self.destroy_attempted = True
with self.assertRaises(ClientError) as error:
_vpc.ec2_conn.describe_vpcs(
VpcIds=[vpc_info['VpcId']]
)
self.assertEqual(
error.exception.response['Error']['Code'],
'InvalidVpcID.NotFound'
)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 5,092,826,934,716,075,000 | 29.245902 | 75 | 0.486179 | false | 3.956397 | true | false | false |
tkf/factlog | factlog/accessinfo.py | 1 | 2423 | # Copyright (c) 2013- Takafumi Arakaki
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from .utils.strutils import remove_prefix, get_lines_at_point
from .filetitle import write_path_and_title
class AccessInfo(object):
"""
Access information object.
"""
__slots__ = ['path', 'point', 'recorded', 'type', 'showpath']
def __init__(self, path, point, recorded, type):
self.path = self.showpath = path
self.point = point
self.recorded = recorded
self.type = type
def _set_relative_path(self, absunder):
"""
Set :attr:`showpath` and return the newly set value.
:attr:`showpath` is set the relative path of :attr:`path` from
one of the path in `absunder`.
"""
self.showpath = remove_prefix(absunder, self.path)
return self.showpath
def _get_lines_at_point(self, pre_lines, post_lines):
with open(self.path) as f:
return get_lines_at_point(
f.read(), self.point, pre_lines, post_lines)
def write_paths_and_lines(self, file, pre_lines=0, post_lines=0,
newline='\n', separator=':'):
"""
Write :attr:`showpath` and lines around :attr:`point` to `file`.
"""
for (lineno, line) in self._get_lines_at_point(pre_lines, post_lines):
file.write(self.showpath)
file.write(separator)
file.write(str(lineno))
file.write(separator)
file.write(line)
file.write(newline)
def write_path_and_title(self, file, newline='\n', separator=':',
**kwds):
"""
Call :func:`.filetitle.write_path_and_title`.
"""
write_path_and_title(file, self.path, self.showpath,
newline, separator, **kwds)
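# Illustrative sketch (not part of factlog): print grep-style context lines for
# one access record. The constructor arguments are example values only.
#
#   import sys
#   info = AccessInfo('/tmp/example.py', point=120, recorded=None, type='open')
#   info.write_paths_and_lines(sys.stdout, pre_lines=1, post_lines=1)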
| lgpl-3.0 | -1,573,684,291,286,381,800 | 33.614286 | 78 | 0.613289 | false | 3.889246 | false | false | false |
chadgates/thetravelling2 | wedding/models.py | 1 | 5554 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.db import models
from django.conf import settings
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
import uuid
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToFit, ResizeCanvas, ResizeToFill
class TimeStampedModel(models.Model):
# Abstract base class model that provides self-updating created and modified fields
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
@python_2_unicode_compatible
class Rsvp(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
date = models.DateTimeField(auto_now_add=True)
will_attend = models.NullBooleanField(verbose_name=_('Attendance'), null=True, blank=True)
guest2 = models.CharField(verbose_name=_('Name, Surname'), max_length=100, blank=True)
guest3 = models.CharField(verbose_name=_('Name, Surname'), max_length=100, blank=True)
guest4 = models.CharField(verbose_name=_('Name, Surname'), max_length=100, blank=True)
def __str__(self):
return self.user.name + ': ' + ("is coming" if self.will_attend else "not coming")
def get_absolute_url(self):
return reverse('wedding:rsvp-detail', kwargs={'username': self.user.username})
class Meta:
verbose_name = "RSVP"
verbose_name_plural = "RSVPs"
permissions = (
("view_list", "Can see the RSVP list"),
)
class Gift(TimeStampedModel):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(verbose_name=_('Name'), max_length=300)
description = models.TextField(verbose_name=_('Description'), null=True, blank=True)
link = models.TextField(verbose_name=_('Link'), null=True, blank=True)
price = models.DecimalField(verbose_name=_('Price'), max_digits=7, decimal_places=2)
gift_is_part = models.BooleanField(verbose_name=_('Gift is part'), default=False)
max_parts = models.PositiveIntegerField(verbose_name=_('Maximum number of parts'))
taken_parts = models.PositiveIntegerField(verbose_name=_('Number of parts taken'), default=0)
img = models.ImageField(blank=True, null=True)
img_catalog = ImageSpecField(source='img', processors=[ResizeToFit(800, 600), ResizeCanvas(800, 600)],
format='JPEG', options={'quality': 60})
img_miniature = ImageSpecField(source='img', processors=[ResizeToFill(60, 60)],
format='JPEG', options={'quality': 60})
def is_available(self):
if self.taken_parts < self.max_parts:
return True
else:
return False
def avail_parts(self):
return self.max_parts - self.taken_parts
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse("wedding:gift-detail", kwargs={'pk': self.pk})
class Meta:
verbose_name = "Gift"
verbose_name_plural = "Gifts"
permissions = (
("edit", "Can edit the Gift list"),
)
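# Illustrative sketch (not part of the app) of the "gift in parts" logic above:
# a gift split into max_parts shares stays available until every share is taken.
# The field values are examples only.
#
#   gift = Gift(name='Honeymoon dinner', price=300, gift_is_part=True,
#               max_parts=3, taken_parts=2)
#   gift.is_available()   # True -> one part still available
#   gift.avail_parts()    # 1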
class GiftOrder(TimeStampedModel):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
voucher_from = models.CharField(verbose_name=_('Voucher is from'), max_length=300)
voucher_greeting = models.TextField(verbose_name=_('Voucher Greeting'), null=True, blank=True)
voucher_senddirect = models.BooleanField(verbose_name=_('Send voucher directly'), default=False)
payment_received = models.BooleanField(verbose_name=_('Payment received'), default=False)
voucher_issued = models.BooleanField(verbose_name=_('Voucher issued'), default=False)
total_price = models.DecimalField(verbose_name=_('Total price'), max_digits=10, decimal_places=2, default=0.00)
def __str__(self):
return self.user.name + "/" + "{:%Y/%m/%d}".format(self.created) + "/" + self.total_price.__str__()
class GiftOrderItem(TimeStampedModel):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
gift = models.ForeignKey(Gift)
giftorder = models.ForeignKey(GiftOrder)
quantity = models.PositiveIntegerField(verbose_name=_('Item count'))
price = models.DecimalField(verbose_name=_('Price'), max_digits=7, decimal_places=2, default=0.00)
@property
def price_total(self):
return self.quantity * self.price
def __str__(self):
return self.gift.name
class Cart(TimeStampedModel):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
class CartItem(TimeStampedModel):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
gift = models.ForeignKey(Gift)
quantity = models.PositiveIntegerField(verbose_name=_('Item count'))
def get_absolute_url(self):
return reverse("wedding:cart-detail", kwargs={'pk': self.pk})
@property
def price_total(self):
return self.quantity * self.gift.price
def __str__(self):
return self.gift.name + " " + self.id.__str__()
class GiftOrderStatus(GiftOrder):
class Meta:
proxy = True
def get_absolute_url(self):
return reverse("wedding:orderstatus-detail", kwargs={'pk': self.pk})
| mit | -6,112,628,031,234,148,000 | 38.956835 | 115 | 0.67987 | false | 3.742588 | false | false | false |
googleapis/python-recommendations-ai | google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/grpc_asyncio.py | 1 | 18907 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.recommendationengine_v1beta1.types import catalog
from google.cloud.recommendationengine_v1beta1.types import catalog_service
from google.cloud.recommendationengine_v1beta1.types import import_
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import CatalogServiceGrpcTransport
class CatalogServiceGrpcAsyncIOTransport(CatalogServiceTransport):
"""gRPC AsyncIO backend transport for CatalogService.
Service for ingesting catalog information of the customer's
website.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "recommendationengine.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
def __init__(
self,
*,
host: str = "recommendationengine.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Sanity check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def create_catalog_item(
self,
) -> Callable[
[catalog_service.CreateCatalogItemRequest], Awaitable[catalog.CatalogItem]
]:
r"""Return a callable for the create catalog item method over gRPC.
Creates a catalog item.
Returns:
Callable[[~.CreateCatalogItemRequest],
Awaitable[~.CatalogItem]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_catalog_item" not in self._stubs:
self._stubs["create_catalog_item"] = self.grpc_channel.unary_unary(
"/google.cloud.recommendationengine.v1beta1.CatalogService/CreateCatalogItem",
request_serializer=catalog_service.CreateCatalogItemRequest.serialize,
response_deserializer=catalog.CatalogItem.deserialize,
)
return self._stubs["create_catalog_item"]
@property
def get_catalog_item(
self,
) -> Callable[
[catalog_service.GetCatalogItemRequest], Awaitable[catalog.CatalogItem]
]:
r"""Return a callable for the get catalog item method over gRPC.
Gets a specific catalog item.
Returns:
Callable[[~.GetCatalogItemRequest],
Awaitable[~.CatalogItem]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_catalog_item" not in self._stubs:
self._stubs["get_catalog_item"] = self.grpc_channel.unary_unary(
"/google.cloud.recommendationengine.v1beta1.CatalogService/GetCatalogItem",
request_serializer=catalog_service.GetCatalogItemRequest.serialize,
response_deserializer=catalog.CatalogItem.deserialize,
)
return self._stubs["get_catalog_item"]
@property
def list_catalog_items(
self,
) -> Callable[
[catalog_service.ListCatalogItemsRequest],
Awaitable[catalog_service.ListCatalogItemsResponse],
]:
r"""Return a callable for the list catalog items method over gRPC.
Gets a list of catalog items.
Returns:
Callable[[~.ListCatalogItemsRequest],
Awaitable[~.ListCatalogItemsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_catalog_items" not in self._stubs:
self._stubs["list_catalog_items"] = self.grpc_channel.unary_unary(
"/google.cloud.recommendationengine.v1beta1.CatalogService/ListCatalogItems",
request_serializer=catalog_service.ListCatalogItemsRequest.serialize,
response_deserializer=catalog_service.ListCatalogItemsResponse.deserialize,
)
return self._stubs["list_catalog_items"]
@property
def update_catalog_item(
self,
) -> Callable[
[catalog_service.UpdateCatalogItemRequest], Awaitable[catalog.CatalogItem]
]:
r"""Return a callable for the update catalog item method over gRPC.
Updates a catalog item. Partial updating is
supported. Non-existing items will be created.
Returns:
Callable[[~.UpdateCatalogItemRequest],
Awaitable[~.CatalogItem]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_catalog_item" not in self._stubs:
self._stubs["update_catalog_item"] = self.grpc_channel.unary_unary(
"/google.cloud.recommendationengine.v1beta1.CatalogService/UpdateCatalogItem",
request_serializer=catalog_service.UpdateCatalogItemRequest.serialize,
response_deserializer=catalog.CatalogItem.deserialize,
)
return self._stubs["update_catalog_item"]
@property
def delete_catalog_item(
self,
) -> Callable[
[catalog_service.DeleteCatalogItemRequest], Awaitable[empty_pb2.Empty]
]:
r"""Return a callable for the delete catalog item method over gRPC.
Deletes a catalog item.
Returns:
Callable[[~.DeleteCatalogItemRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_catalog_item" not in self._stubs:
self._stubs["delete_catalog_item"] = self.grpc_channel.unary_unary(
"/google.cloud.recommendationengine.v1beta1.CatalogService/DeleteCatalogItem",
request_serializer=catalog_service.DeleteCatalogItemRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_catalog_item"]
@property
def import_catalog_items(
self,
) -> Callable[
[import_.ImportCatalogItemsRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the import catalog items method over gRPC.
Bulk import of multiple catalog items. Request
processing may be synchronous. No partial updating
supported. Non-existing items will be created.
Operation.response is of type ImportResponse. Note that
it is possible for a subset of the items to be
successfully updated.
Returns:
Callable[[~.ImportCatalogItemsRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "import_catalog_items" not in self._stubs:
self._stubs["import_catalog_items"] = self.grpc_channel.unary_unary(
"/google.cloud.recommendationengine.v1beta1.CatalogService/ImportCatalogItems",
request_serializer=import_.ImportCatalogItemsRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["import_catalog_items"]
__all__ = ("CatalogServiceGrpcAsyncIOTransport",)
| apache-2.0 | -2,706,880,795,725,761,000 | 42.867749 | 95 | 0.627545 | false | 4.688073 | false | false | false |
uber/pyro | tests/distributions/test_pickle.py | 1 | 3192 | # Copyright (c) 2017-2019 Uber Technologies, Inc.
# SPDX-License-Identifier: Apache-2.0
import inspect
import io
import pytest
import pickle
import torch
import pyro.distributions as dist
from pyro.distributions.torch_distribution import TorchDistributionMixin
from tests.common import xfail_param
# Collect distributions.
BLACKLIST = [
dist.TorchDistribution,
dist.ExponentialFamily,
dist.OMTMultivariateNormal,
]
XFAIL = {
dist.Gumbel: xfail_param(dist.Gumbel, reason='cannot pickle weakref'),
}
DISTRIBUTIONS = [d for d in dist.__dict__.values()
if isinstance(d, type)
if issubclass(d, TorchDistributionMixin)
if d not in BLACKLIST]
DISTRIBUTIONS.sort(key=lambda d: d.__name__)
DISTRIBUTIONS = [XFAIL.get(d, d) for d in DISTRIBUTIONS]
# Provide default args if Dist(1, 1, ..., 1) is known to fail.
ARGS = {
dist.AVFMultivariateNormal: [torch.zeros(3), torch.eye(3), torch.rand(2, 4, 3)],
dist.Bernoulli: [0.5],
dist.Binomial: [2, 0.5],
dist.Categorical: [torch.ones(2)],
dist.Delta: [torch.tensor(0.)],
dist.Dirichlet: [torch.ones(2)],
dist.GaussianScaleMixture: [torch.ones(2), torch.ones(3), torch.ones(3)],
dist.Geometric: [0.5],
dist.Independent: [dist.Normal(torch.zeros(2), torch.ones(2)), 1],
dist.LowRankMultivariateNormal: [torch.zeros(2), torch.ones(2, 2), torch.ones(2)],
dist.MaskedMixture: [torch.tensor([1, 0]).bool(), dist.Normal(0, 1), dist.Normal(0, 2)],
dist.MixtureOfDiagNormals: [torch.ones(2, 3), torch.ones(2, 3), torch.ones(2)],
dist.MixtureOfDiagNormalsSharedCovariance: [torch.ones(2, 3), torch.ones(3), torch.ones(2)],
dist.Multinomial: [2, torch.ones(2)],
dist.MultivariateNormal: [torch.ones(2), torch.eye(2)],
dist.OneHotCategorical: [torch.ones(2)],
dist.RelaxedBernoulli: [1.0, 0.5],
dist.RelaxedBernoulliStraightThrough: [1.0, 0.5],
dist.RelaxedOneHotCategorical: [1., torch.ones(2)],
dist.RelaxedOneHotCategoricalStraightThrough: [1., torch.ones(2)],
dist.TransformedDistribution: [dist.Normal(0, 1), torch.distributions.ExpTransform()],
dist.Uniform: [0, 1],
dist.VonMises3D: [torch.tensor([1., 0., 0.])],
}
@pytest.mark.parametrize('Dist', DISTRIBUTIONS)
def test_pickle(Dist):
if Dist in ARGS:
args = ARGS[Dist]
else:
# Optimistically try to initialize with Dist(1, 1, ..., 1).
try:
# Python 3.6+
spec = list(inspect.signature(Dist.__init__).parameters.values())
nargs = sum(1 for p in spec if p.default is p.empty) - 1
except AttributeError:
# Python 2.6-3.5
spec = inspect.getargspec(Dist.__init__)
nargs = len(spec.args) - 1 - (len(spec.defaults) if spec.defaults else 0)
args = (1,) * nargs
try:
dist = Dist(*args)
except Exception:
pytest.skip(msg='cannot construct distribution')
buffer = io.BytesIO()
# Note that pickling torch.Size() requires protocol >= 2
torch.save(dist, buffer, pickle_protocol=pickle.HIGHEST_PROTOCOL)
buffer.seek(0)
deserialized = torch.load(buffer)
assert isinstance(deserialized, Dist)
| apache-2.0 | -1,041,571,236,107,058,800 | 37 | 96 | 0.657268 | false | 3.237323 | false | false | false |
mganeva/mantid | qt/python/mantidqt/widgets/fitpropertybrowser/fitpropertybrowser.py | 1 | 13747 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2017 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
# This file is part of the mantidqt package
#
#
from __future__ import (print_function, absolute_import, unicode_literals)
import re
from qtpy.QtCore import Qt, Signal, Slot
from mantid.api import AlgorithmManager
from mantid.simpleapi import mtd
from mantidqt.utils.qt import import_qt
from .interactive_tool import FitInteractiveTool
BaseBrowser = import_qt('.._common', 'mantidqt.widgets', 'FitPropertyBrowser')
class FitPropertyBrowserBase(BaseBrowser):
def __init__(self, parent=None):
super(FitPropertyBrowserBase, self).__init__(parent)
self.init()
class FitPropertyBrowser(FitPropertyBrowserBase):
"""
A wrapper around C++ FitPropertyBrowser with added graphical peak editing tool.
"""
closing = Signal()
pattern_fittable_curve = re.compile(r'(.+?): spec (\d+)')
def __init__(self, canvas, toolbar_state_checker, parent=None):
super(FitPropertyBrowser, self).__init__(parent)
self.init()
self.setFeatures(self.DockWidgetMovable)
self.canvas = canvas
self.workspace_labels = []
# The toolbar state checker to be passed to the peak editing tool
self.toolbar_state_checker = toolbar_state_checker
# The peak editing tool
self.tool = None
# Pyplot lines for the fit result curves
self.fit_result_lines = []
# Pyplot line for the guess curve
self.guess_line = None
# Map the indices of the markers in the peak editing tool to the peak function prefixes (in the form f0.f1...)
self.peak_ids = {}
self._connect_signals()
def _connect_signals(self):
self.startXChanged.connect(self.move_start_x)
self.endXChanged.connect(self.move_end_x)
self.algorithmFinished.connect(self.fitting_done_slot)
self.changedParameterOf.connect(self.peak_changed_slot)
self.removeFitCurves.connect(self.clear_fit_result_lines_slot, Qt.QueuedConnection)
self.plotGuess.connect(self.plot_guess_slot, Qt.QueuedConnection)
self.functionChanged.connect(self.function_changed_slot, Qt.QueuedConnection)
@classmethod
def can_fit_spectra(cls, labels):
"""
Determine if the spectra referred to by the plot labels can be used in this fit browser.
:param labels: A list of curve labels which can identify spectra in a workspace.
:return: True or False
"""
return any(map(lambda s: re.match(cls.pattern_fittable_curve, s), labels))
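    # Illustrative note (not part of the original source): a curve label such
    # as "MyWorkspace: spec 2" matches pattern_fittable_curve, yielding the
    # workspace name "MyWorkspace" and spectrum number 2; show() below uses the
    # same pattern to build its allowed_spectra mapping.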
def closeEvent(self, event):
"""
Emit self.closing signal used by figure manager to put the menu buttons in correct states
"""
self.closing.emit()
BaseBrowser.closeEvent(self, event)
def show(self):
"""
Override the base class method. Initialise the peak editing tool.
"""
allowed_spectra = {}
for label in self.workspace_labels:
a_match = re.match(self.pattern_fittable_curve, label)
if a_match:
name, spec = a_match.group(1), int(a_match.group(2))
spec_list = allowed_spectra.get(name, [])
spec_list.append(spec)
allowed_spectra[name] = spec_list
if len(allowed_spectra) > 0:
for name, spec_list in allowed_spectra.items():
self.addAllowedSpectra(name, spec_list)
self.tool = FitInteractiveTool(self.canvas, self.toolbar_state_checker,
current_peak_type=self.defaultPeakType())
self.tool.fit_start_x_moved.connect(self.setStartX)
self.tool.fit_end_x_moved.connect(self.setEndX)
self.tool.peak_added.connect(self.peak_added_slot)
self.tool.peak_moved.connect(self.peak_moved_slot)
self.tool.peak_fwhm_changed.connect(self.peak_fwhm_changed_slot)
self.tool.peak_type_changed.connect(self.setDefaultPeakType)
self.tool.add_background_requested.connect(self.add_function_slot)
self.tool.add_other_requested.connect(self.add_function_slot)
self.setXRange(self.tool.fit_start_x.x, self.tool.fit_end_x.x)
super(FitPropertyBrowser, self).show()
self.setPeakToolOn(True)
self.canvas.draw()
def hide(self):
"""
Override the base class method. Hide the peak editing tool.
"""
if self.tool is not None:
self.tool.fit_start_x_moved.disconnect()
self.tool.fit_end_x_moved.disconnect()
self.tool.disconnect()
self.tool = None
self.canvas.draw()
super(FitPropertyBrowser, self).hide()
self.setPeakToolOn(False)
def move_start_x(self, xd):
"""
Let the tool know that StartX has changed.
:param xd: New value of StartX
"""
if self.tool is not None:
self.tool.move_start_x(xd)
def move_end_x(self, xd):
"""
Let the tool know that EndX has changed.
:param xd: New value of EndX
"""
if self.tool is not None:
self.tool.move_end_x(xd)
def clear_fit_result_lines(self):
"""
Delete the fit curves.
"""
for lin in self.fit_result_lines:
try:
lin.remove()
except ValueError:
# workspace replacement could invalidate these references
pass
self.fit_result_lines = []
self.update_legend()
def get_lines(self):
"""
Get all lines in the connected plot.
"""
return self.get_axes().get_lines()
def get_axes(self):
"""
Get the pyplot's Axes object.
"""
return self.canvas.figure.get_axes()[0]
def update_legend(self):
"""
This needs to be called to update plot's legend after removing lines.
"""
axes = self.get_axes()
if axes.legend_ is not None:
axes.legend()
def plot_guess(self):
"""
Plot the guess curve.
"""
from mantidqt.plotting.functions import plot
fun = self.getFittingFunction()
ws_name = self.workspaceName()
if fun == '' or ws_name == '':
return
ws_index = self.workspaceIndex()
out_ws_name = '{}_guess'.format(ws_name)
alg = AlgorithmManager.createUnmanaged('EvaluateFunction')
alg.setChild(True)
alg.initialize()
alg.setProperty('Function', fun)
alg.setProperty('InputWorkspace', ws_name)
alg.setProperty('WorkspaceIndex', ws_index)
alg.setProperty('OutputWorkspace', out_ws_name)
alg.execute()
out_ws = alg.getProperty('OutputWorkspace').value
plot([out_ws], wksp_indices=[1], fig=self.canvas.figure, overplot=True, plot_kwargs={'label': out_ws_name})
for lin in self.get_lines():
if lin.get_label().startswith(out_ws_name):
self.guess_line = lin
self.setTextPlotGuess('Remove Guess')
self.canvas.draw()
def remove_guess(self):
"""
Remove the guess curve from the plot.
"""
if self.guess_line is None:
return
self.guess_line.remove()
self.guess_line = None
self.update_legend()
self.setTextPlotGuess('Plot Guess')
self.canvas.draw()
def update_guess(self):
"""
Update the guess curve.
"""
if self.guess_line is None:
return
self.remove_guess()
self.plot_guess()
def add_to_menu(self, menu):
"""
Add the relevant actions to a menu
:param menu: A menu to hold the actions
:return: The menu passed to us
"""
if self.tool is not None:
self.tool.add_to_menu(menu, peak_names=self.registeredPeaks(),
current_peak_type=self.defaultPeakType(),
background_names=self.registeredBackgrounds(),
other_names=self.registeredOthers())
return menu
@Slot()
def clear_fit_result_lines_slot(self):
"""
Clear the fit lines.
"""
self.clear_fit_result_lines()
if self.tool is not None:
self.canvas.draw()
@Slot(str)
def fitting_done_slot(self, name):
"""
This is called after Fit finishes to update the fit curves.
:param name: The name of Fit's output workspace.
"""
from mantidqt.plotting.functions import plot
ws = mtd[name]
# Keep local copy of the original lines
original_lines = self.get_lines()
self.clear_fit_result_lines()
plot([ws], wksp_indices=[1, 2], fig=self.canvas.figure, overplot=True)
name += ':'
for lin in self.get_lines():
if lin.get_label().startswith(name):
self.fit_result_lines.append(lin)
# Add properties back to the lines
new_lines = self.get_lines()
for new_line, old_line in zip(new_lines, original_lines):
new_line.update_from(old_line)
# Now update the legend to make sure it changes to the old properties
self.get_axes().legend().draggable()
@Slot(int, float, float, float)
def peak_added_slot(self, peak_id, centre, height, fwhm):
"""
Respond to a signal from the peak editing tool that a peak is added.
Add a peak function to the browser.
:param peak_id: An index of a peak marker in the peak editing tool.
:param centre: Peak centre
:param height: Peak height (peak maximum)
:param fwhm: Peak's full width at half maximum
"""
fun = self.addFunction(self.defaultPeakType())
self.setPeakCentreOf(fun, centre)
self.setPeakHeightOf(fun, height)
self.setPeakFwhmOf(fun, fwhm)
self.peak_ids[peak_id] = fun
@Slot(int, float, float)
def peak_moved_slot(self, peak_id, centre, height):
"""
Respond to the peak editing tool moving peak's top to a new position.
:param peak_id: Peak's index/id
:param centre: New peak centre
:param height: New peak height
"""
fun = self.peak_ids[peak_id]
self.setPeakCentreOf(fun, centre)
self.setPeakHeightOf(fun, height)
self.update_guess()
@Slot(int, float)
def peak_fwhm_changed_slot(self, peak_id, fwhm):
"""
Respond to the peak editing tool changing peak's width.
:param peak_id: Peak's index/id
:param fwhm: New peak full width at half maximum.
"""
fun = self.peak_ids[peak_id]
self.setPeakFwhmOf(fun, fwhm)
self.update_guess()
@Slot(str)
def peak_changed_slot(self, fun):
"""
Update the peak marker in the peak editing tool after peak's parameters change in the browser.
:param fun: A prefix of the function that changed.
"""
for peak_id, prefix in self.peak_ids.items():
if prefix == fun:
self.tool.update_peak(peak_id, self.getPeakCentreOf(prefix),
self.getPeakHeightOf(prefix),
self.getPeakFwhmOf(prefix))
self.update_guess()
@Slot(str)
def add_function_slot(self, fun_name):
"""
Respond to a signal from the peak editing tool to add a new function.
:param fun_name: A registered name of a fit function
"""
self.addFunction(fun_name)
@Slot()
def plot_guess_slot(self):
"""
Toggle the guess plot.
"""
if self.guess_line is None:
self.plot_guess()
else:
self.remove_guess()
@Slot()
def function_changed_slot(self):
"""
Update the peak editing tool after function structure has changed in the browser: functions added
and/or removed.
"""
peaks_to_add = []
peaks = {v: k for k, v in self.peak_ids.items()}
for prefix in self.getPeakPrefixes():
c, h, w = self.getPeakCentreOf(prefix), self.getPeakHeightOf(prefix), self.getPeakFwhmOf(prefix)
if prefix in peaks:
self.tool.update_peak(peaks[prefix], c, h, w)
del peaks[prefix]
else:
peaks_to_add.append((prefix, c, h, w))
for i in peaks.values():
del self.peak_ids[i]
need_update_markers = len(peaks_to_add) > 0
if not need_update_markers:
plist = self.tool.get_peak_list()
for i, c, h, w in plist:
prefix = self.peak_ids.get(i)
if prefix is None:
need_update_markers = True
break
if self.getPeakCentreOf(prefix) != c or self.getPeakHeightOf(prefix) != h or\
self.getPeakFwhmOf(prefix) != w:
need_update_markers = True
break
if need_update_markers:
peak_ids, peak_updates = self.tool.update_peak_markers(self.peak_ids.keys(), peaks_to_add)
self.peak_ids.update(peak_ids)
for prefix, c, h, w in peak_updates:
self.setPeakCentreOf(prefix, c)
self.setPeakHeightOf(prefix, h)
self.setPeakFwhmOf(prefix, w)
self.update_guess()
| gpl-3.0 | -147,117,974,511,443,840 | 34.986911 | 118 | 0.588419 | false | 3.874577 | false | false | false |
abutcher/Taboot | taboot/output.py | 1 | 13108 | # -*- coding: utf-8 -*-
# Taboot - Client utility for performing deployments with Func.
# Copyright © 2009,2011, Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class _FileLikeOutputObject(object):
"""
A file-like parent class.
"""
import exceptions
import time as _time
defaults = None
starttime = None
def __init__(self, *args, **kwargs):
"""
Creates an instance of a file-like object.
:Parameters:
- `args`: all non-keyword arguments.
- `kwargs`: all keyword arguments.
"""
import ConfigParser
import os.path
if _FileLikeOutputObject.defaults is None:
            if os.path.isfile(os.path.expanduser("~/.taboot.conf")):
_FileLikeOutputObject.defaults = ConfigParser.ConfigParser()
_FileLikeOutputObject.defaults.read(
os.path.expanduser("~/.taboot.conf"))
# Only set the start time once, not for each logger instance
if _FileLikeOutputObject.starttime is None:
import datetime
_FileLikeOutputObject.starttime = datetime.datetime.today()
self._pos = 0L
self._closed = False
self._setup(*args, **kwargs)
def _setup(self, *args, **kwargs):
"""
Implementation specific setup.
:Parameters:
- `args`: all non-keyword arguments.
- `kwargs`: all keyword arguments.
"""
pass
def flush(self):
"""
We are not buffering so we always just return None.
"""
return None
def read(self, *args, **kwargs):
"""
We are an output only file-like object. Raise exception.
:Parameters:
- `args`: all non-keyword arguments.
- `kwargs`: all keyword arguments.
"""
raise self.exceptions.NotImplementedError('Object for output only.')
def tell(self):
"""
Returns the position of the file-like object.
"""
return self._pos
def truncate(self, size):
"""
We are an output only file-like object. Raise exception.
:Parameters:
- `size`: size to truncate to.
"""
raise self.exceptions.NotImplementedError(
'This does not support truncate.')
def writelines(self, sequence):
"""
Writes a sequence of lines.
:Parameters:
- `sequence`: iterable sequence of data to write.
"""
for item in sequence:
self.write(item)
def write(self, item):
"""
Writer wrapper (not rapper, beav). Simply calls _write which is
implementation specific and updates the position.
:Parameters:
- `item`: the item to write.
"""
self._write(item)
self._pos += 1
def _write(self, item):
"""
Implementation of writing data.
:Parameters:
- `item`: the item to write.
"""
raise self.exceptions.NotImplementedError(
'_write must be overriden.')
def close(self):
"""
Close wrapper (again, not rapper, beav). Simply calls _close which
is implementation specific and updates the closed property.
"""
self._close()
self._closed = True
def _close(self):
"""
Implementation of closing the file-like object.
By default nothing occurs.
"""
pass
# Read aliases
readline = read
readlines = read
xreadlines = read
seek = read
# Read-only Properties
closed = property(lambda self: self._closed)
timestamp = property(lambda self: self._time.strftime(
"%Y-%m-%d %H:%M:%S", self._time.localtime()))
class CLIOutput(_FileLikeOutputObject):
"""
Output a :class:`taboot.tasks.TaskResult` to the command line
with pretty formatting and colors.
"""
def _setup(self, host, task):
"""
Implementation specific setup for outputting to the CLI.
:Parameters:
- `host`: name of the host
- `task`: name of the task
"""
import Colors
import sys
self._c = Colors.Colors()
self._sys = sys
self._sys.stdout.write('%s:\n' % (
self._c.format_string(host, 'blue')))
self._sys.stdout.write('%s Starting Task[%s]\n' % (
self.timestamp, self._c.format_string(task, 'white')))
def _write(self, result):
"""
Implementation of writing to the CLI.
:Parameters:
- `result`: result object to inspect and write
"""
import types
# Set output color
output_color = 'red'
if result.success:
output_color = 'green'
self._sys.stdout.write("%s:\n" % (
self._c.format_string(result.host, 'blue')))
self._sys.stdout.write("%s Finished Task[%s]:\n" % (
self.timestamp, self._c.format_string(
result.task, output_color)))
if isinstance(result.output, types.ListType):
for r in result.output:
self._sys.stdout.write("%s\n" % self._c.format_string(
r.strip(), output_color))
else:
self._sys.stdout.write("%s\n" % self._c.format_string(
result.output.strip(), output_color))
class LogOutput(_FileLikeOutputObject):
"""
Output a :class:`taboot.tasks.TaskResult` to a logfile.
"""
def _setup(self, host, task, logfile='taboot.log'):
"""
Implementation specific setup for outputting to a log.
:Parameters:
- `logfile`: name of the logfile to write to.
"""
self._logfile = logfile
if self._logfile in ('-', 'stdout', '1'):
import sys
self._log_fd = sys.stdout
else:
self._log_fd = open(logfile, 'a')
self._log_fd.write('%s:\n%s Starting Task[%s]\n\n' % (
host, self.timestamp, task))
def _write(self, result):
"""
Implementation of writing to a log.
:Parameters:
- `result`: result object to inspect and write
"""
import types
if result.success:
success_str = 'OK'
else:
success_str = 'FAIL'
self._log_fd.write("%s:\n%s Finished Task[%s]: %s\n" % (
result.host, self.timestamp, result.task, success_str))
if isinstance(result.output, types.ListType):
for r in result.output:
self._log_fd.write("%s\n\n" % r.strip())
else:
self._log_fd.write("%s\n\n" % result.output.strip())
class EmailOutput(_FileLikeOutputObject):
"""
    Output a :class:`taboot.tasks.TaskResult` summary via email.
"""
def _setup(self, to_addr, from_addr='taboot@redhat.com'):
"""
        Implementation specific setup for sending results via email.
:Parameters:
- `to_addr`: who to send the email to.
- `from_addr`: who the email is from.
"""
try:
import cStringIO as StringIO
except ImportError, ie:
import StringIO
self._to_addr = to_addr
self._from_addr = from_addr
self._buffer = StringIO.StringIO()
def _write(self, result):
"""
Implementation of writing out to an email.
:Parameters:
- `result`: result object to inspect and write
"""
if result.success:
success_str = 'OK'
else:
success_str = 'FAIL'
        self._last_host = result.host  # remembered for the Subject line in flush()
        self._buffer.write("%s: %s" % (result.task, success_str))
def flush(self):
"""
Flushing sends the email with the buffer.
"""
import smtplib
from email.mime.text import MIMEText
self._buffer.flush()
        msg = MIMEText(self._buffer.getvalue())
        msg['Subject'] = getattr(self, '_last_host', 'taboot results')
        msg['From'] = self._from_addr
        msg['To'] = self._to_addr
        smtp = smtplib.SMTP()
smtp.connect()
smtp.sendmail(self._from_addr, [self._to_addr], msg.as_string())
smtp.close()
def __del__(self):
"""
If the buffer is not empty before destroying, flush.
"""
        if self._buffer.tell() > 0:
self.flush()
class HTMLOutput(_FileLikeOutputObject):
"""
Output a :class:`taboot.tasks.TaskResult` to the command line
with pretty formatting and colors.
"""
logfile_path = None
def _expand_starttime(self, param):
"""
Expand any instances of "%s" in `param`
"""
if '%s' in param:
p = param % HTMLOutput.starttime
return p.replace(" ", "-")
else:
return param
def _setup(self, host, task, logfile="taboot-%s.html", destdir="."):
"""
Implementation specific setup for outputting to an HTML file.
:Parameters:
- `host`: name of the host
- `task`: name of the task
- `logfile`: name of the file to log to, '%s' is substituted
with a datestamp
- `destdir`: directory in which to save the log file to
"""
import Colors
import sys
import os.path
import os
_default_logfile = "taboot-%s.html"
_default_destdir = "."
# Pick if the parameter is changed
# Pick if above is false and logfile is set in defaults
# Else, use parameter
if not logfile == _default_logfile:
_logfile = logfile
elif HTMLOutput.defaults is not None and \
HTMLOutput.defaults.has_option("HTMLOutput", "logfile"):
_logfile = HTMLOutput.defaults.get("HTMLOutput", "logfile")
else:
_logfile = logfile
# Expand %s into a time stamp if necessary
_logfile = self._expand_starttime(_logfile)
if not destdir == _default_destdir:
_destdir = destdir
elif HTMLOutput.defaults is not None and \
HTMLOutput.defaults.has_option("HTMLOutput", "destdir"):
_destdir = HTMLOutput.defaults.get("HTMLOutput", "destdir")
else:
_destdir = destdir
# Figured it all out, now we join them together!
self._logfile_path = os.path.join(_destdir, _logfile)
if not os.path.exists(_destdir):
os.makedirs(_destdir, 0755)
self._c = Colors.HTMLColors()
self._log_fd = open(self._logfile_path, 'a')
# Lets only print this when it is set or changed
if HTMLOutput.logfile_path is None or \
not HTMLOutput.logfile_path == self._logfile_path:
sys.stderr.write("Logging HTML Output to %s\n" % \
self._logfile_path)
HTMLOutput.logfile_path = self._logfile_path
sys.stderr.flush()
# Log the start of this task
name = self._fmt_anchor(self._fmt_hostname(host))
start_msg = """<p><tt>%s:</tt></p>
<p><tt>%s Starting Task[%s]\n</tt>""" % (name, self.timestamp, task)
self._log_fd.write(start_msg)
self._log_fd.flush()
def _fmt_anchor(self, text):
"""
Format an #anchor and a clickable link to it
"""
h = hash(self.timestamp)
anchor_str = "<a name='%s' href='#%s'>%s</a>" % (h, h, text)
return anchor_str
def _fmt_hostname(self, n):
"""
Standardize the hostname formatting
"""
return "<b>%s</b>" % self._c.format_string(n, 'blue')
def _write(self, result):
"""
DO IT!
"""
import types
import sys
name = self._fmt_hostname(result.host)
if result.success:
success_str = 'OK'
else:
success_str = 'FAIL'
self._log_fd.write("<p><tt>%s:\n</tt></p>\n<p><tt>%s "\
"Finished Task[%s]: %s</tt></p>\n" %
(name, self.timestamp, result.task, success_str))
if isinstance(result.output, types.ListType):
for r in result.output:
self._log_fd.write("<pre>%s</pre>\n<br />\n<br />\n" %
r.strip())
else:
self._log_fd.write("<pre>%s</pre>\n<br />\n<br />\n" %
result.output.strip())
self._log_fd.flush()
| gpl-3.0 | 3,304,960,080,265,519,600 | 29.131034 | 76 | 0.547265 | false | 4.119107 | false | false | false |
NASLab/GroundROS | src/experimental_results/path_planning_analysis_cost_function.py | 1 | 2102 | # python experimental tests for Husky
import numpy as np
from numpy import sin, cos, pi, zeros
# import matplotlib.pyplot as plt
def calibration_cost_function(parameters):
# print parameters
yaw_bounde = 1 * pi / 180
matrix_limits = 20
matrix_resolution = .005
    number_of_rows = int(matrix_limits / matrix_resolution)  # grid cells per side
cost_matrix = zeros((number_of_rows, number_of_rows))
env_data = np.load('env.npy')[1:]
x_offset_calibrate, y_offset_calibrate = parameters
yaw_calibrate = pi / 180 * (0)
# x_offset_calibrate = .2
# y_offset_calibrate = -.064
# x = [[]] * len(env_data)
# y = [[]] * len(env_data)
# print len(env_data)
for i in range(1, len(env_data) - 1):
if len(env_data[i]) > 0:
x = env_data[i][0]
y = env_data[i][1]
yaw = env_data[i][2]
if len(env_data[i+1])==0 or abs(yaw - env_data[i - 1][2]) > yaw_bounde or abs(yaw - env_data[i + 1][2]) > yaw_bounde:
continue
readings = env_data[i][3]
# k = 0
for j in range(len(readings)):
x_temp = readings[j][0] * cos(-readings[j][1])
y_temp = readings[j][0] * sin(-readings[j][1])
x_temp2 = x_temp * \
cos(yaw_calibrate) - y_temp * \
sin(yaw_calibrate) + x_offset_calibrate
y_temp2 = y_temp * \
cos(yaw_calibrate) + x_temp * \
sin(yaw_calibrate) + y_offset_calibrate
readings_x = x_temp2 * cos(yaw) - y_temp2 * sin(yaw) + x
readings_y = y_temp2 * cos(yaw) + x_temp2 * sin(yaw) + y
if readings_x < matrix_limits / 2 and readings_x > -matrix_limits / 2 and readings_y < matrix_limits / 2 and readings_y > -matrix_limits / 2:
cost_matrix[int((readings_x + matrix_limits / 2) / matrix_resolution), int(
(readings_y + matrix_limits / 2) / matrix_resolution)] = 1
# k += 1
cost = sum(sum(cost_matrix))
# print parameters,cost
return cost
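# Illustrative usage sketch (assumes env.npy sits next to this script and that
# offsets are in metres; values below are made up): evaluate the occupied-cell
# count for a few candidate LIDAR offsets so they can be compared by hand.
#
#   for dx in (.18, .20, .22):
#       for dy in (-.08, -.064, -.05):
#           print(dx, dy, calibration_cost_function((dx, dy)))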
| mit | 7,422,599,541,884,601,000 | 40.215686 | 157 | 0.528069 | false | 3.142003 | false | false | false |
HydrelioxGitHub/home-assistant | homeassistant/components/ring/__init__.py | 2 | 1581 | """Support for Ring Doorbell/Chimes."""
import logging
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['ring_doorbell==0.2.2']
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Ring.com"
NOTIFICATION_ID = 'ring_notification'
NOTIFICATION_TITLE = 'Ring Setup'
DATA_RING = 'ring'
DOMAIN = 'ring'
DEFAULT_CACHEDB = '.ring_cache.pickle'
DEFAULT_ENTITY_NAMESPACE = 'ring'
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}),
}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
"""Set up the Ring component."""
conf = config[DOMAIN]
username = conf[CONF_USERNAME]
password = conf[CONF_PASSWORD]
try:
from ring_doorbell import Ring
cache = hass.config.path(DEFAULT_CACHEDB)
ring = Ring(username=username, password=password, cache_file=cache)
if not ring.is_connected:
return False
hass.data['ring'] = ring
except (ConnectTimeout, HTTPError) as ex:
_LOGGER.error("Unable to connect to Ring service: %s", str(ex))
hass.components.persistent_notification.create(
'Error: {}<br />'
'You will need to restart hass after fixing.'
''.format(ex),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID)
return False
return True
| apache-2.0 | -4,661,935,200,043,395,000 | 27.745455 | 75 | 0.665402 | false | 3.77327 | false | false | false |
Stemby/ermeSms | ermesms/plugins/uis/GraphicalUI.py | 1 | 2233 | # -*- coding: utf-8 -*-
import sys
import traceback
from ermesms.plugins.Sender import Sender
from ermesms.PreferenceManager import PreferenceManager
from ermesms.plugins.UI import UI
class GraphicalUI(UI):
"""Interfaccia grafica in PyQt."""
MainFrame = None
def isAvailable(self):
"""Ritorna true se quest'interfaccia è utilizzabile."""
#non ci devono essere parametri se -gui non è specificato
result = (len(sys.argv) == 1) or ("-gui" in sys.argv)
try:
#PyQt4 has to be correctly installed
from PyQt4 import QtGui, QtCore
from ermesms.plugins.uis.MainFrame import MainFrame
except ImportError, e:
result = False
print e
return result
def getPriority(self):
"""Ritorna un codice di priorità. In caso più interfacce siano
utilizzabili, viene scelta quella a maggiore priorità."""
return 3
def run(self):
"""Avvia questa interfaccia."""
from PyQt4 import QtGui, QtCore
from ermesms.plugins.uis.MainFrame import MainFrame
import os
self.QtUIApp = QtGui.QApplication(sys.argv)
pluginWin=os.path.join(os.getcwd(),'qt4_plugins',
'imageformats','qjpeg.dll')
pluginMac=os.path.join(os.getcwd(),'qt4_plugins',
'imageformats','libqjpeg.dylib')
pluginUnix=os.path.join(os.getcwd(),'qt4_plugins',
'imageformats','libqjpeg.so')
if os.path.isfile(pluginWin) or os.path.isfile(pluginMac) or \
os.path.isfile(pluginUnix):
self.QtUIApp.setLibraryPaths(
QtCore.QStringList(os.path.join(os.getcwd(),'qt4_plugins')))
self.QtUIApp.setQuitOnLastWindowClosed(False)
self.MainFrame = MainFrame()
self.MainFrame.show()
sys.exit(self.QtUIApp.exec_())
def showFatalException(self, message):
"""Questo metodo viene richiamato nel caso in cui venga catturata
un'eccezione non gestita nel programma principale."""
from PyQt4 import QtGui
QtGui.QMessageBox.critical(self.MainFrame, "Errore", message)
| gpl-2.0 | -702,704,519,027,997,300 | 36.762712 | 76 | 0.618492 | false | 3.694859 | false | false | false |
eharney/cinder | cinder/tests/unit/keymgr/test_init.py | 1 | 1611 | # Copyright (c) 2016 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from castellan import options as castellan_opts
from oslo_config import cfg
from cinder import keymgr
from cinder import test
class InitTestCase(test.TestCase):
def setUp(self):
super(InitTestCase, self).setUp()
self.config = cfg.ConfigOpts()
castellan_opts.set_defaults(self.config)
self.config.set_default('backend',
'cinder.keymgr.conf_key_mgr.ConfKeyManager',
group='key_manager')
def test_blank_config(self):
kmgr = keymgr.API(self.config)
self.assertEqual(type(kmgr), keymgr.conf_key_mgr.ConfKeyManager)
def test_set_conf_key_manager(self):
self.config.set_override(
'backend',
'cinder.keymgr.conf_key_mgr.ConfKeyManager',
group='key_manager')
kmgr = keymgr.API(self.config)
self.assertEqual(type(kmgr), keymgr.conf_key_mgr.ConfKeyManager)
| apache-2.0 | -4,147,801,743,415,233,500 | 36.465116 | 78 | 0.673495 | false | 3.808511 | true | false | false |
SysCompass/compass-core | compass/hdsdiscovery/hdmanager.py | 2 | 3298 | """Manage hdsdiscovery functionalities"""
import os
import re
import logging
from compass.hdsdiscovery import utils
class HDManager:
"""Process a request."""
def __init__(self):
base_dir = os.path.dirname(os.path.realpath(__file__))
self.vendors_dir = os.path.join(base_dir, 'vendors')
self.vendor_plugins_dir = os.path.join(self.vendors_dir, '?/plugins')
def learn(self, host, credential, vendor, req_obj, oper="SCAN", **kwargs):
"""Insert/update record of switch_info. Get expected results from
           switch according to specific operation.
:param req_obj: the object of a machine
:param host: switch IP address
        :param credential: credentials to access switch
:param oper: operations of the plugin (SCAN, GETONE, SET)
:param kwargs(optional): key-value pairs
"""
plugin_dir = self.vendor_plugins_dir.replace('?', vendor)
if not os.path.exists(plugin_dir):
logging.error('No such directory: %s', plugin_dir)
return None
plugin = utils.load_module(req_obj, plugin_dir, host, credential)
if not plugin:
# No plugin found!
            #TODO add more code to catch exception or unexpected state
logging.error('no plugin %s to load from %s', req_obj, plugin_dir)
return None
return plugin.process_data(oper)
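    # Illustrative call shape (hypothetical host, credential and plugin names,
    # not taken from this repo):
    #   HDManager().learn('10.145.8.10',
    #                     {'version': '2c', 'community': 'public'},
    #                     vendor='huawei', req_obj='mac', oper='SCAN')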
def is_valid_vendor(self, host, credential, vendor):
""" Check if vendor is associated with this host and credential
:param host: switch ip
:param credential: credential to access switch
:param vendor: the vendor of switch
"""
vendor_dir = os.path.join(self.vendors_dir, vendor)
if not os.path.exists(vendor_dir):
logging.error('no such directory: %s', vendor_dir)
return False
vendor_instance = utils.load_module(vendor, vendor_dir)
        #TODO add more code to catch exception or unexpected state
if not vendor_instance:
            # Cannot find the vendor in the directory!
logging.error('no vendor instance %s load from %s',
vendor, vendor_dir)
return False
return vendor_instance.is_this_vendor(host, credential)
def get_vendor(self, host, credential):
""" Check and get vendor of the switch.
        :param host: switch ip
:param credential: credential to access switch
"""
# List all vendors in vendors directory -- a directory but hidden
# under ../vendors
all_vendors = sorted(o for o in os.listdir(self.vendors_dir)
if os.path.isdir(os.path.join(self.vendors_dir, o))
and re.match(r'^[^\.]', o))
logging.debug("[get_vendor]: %s ", all_vendors)
for vname in all_vendors:
vpath = os.path.join(self.vendors_dir, vname)
instance = utils.load_module(vname, vpath)
            #TODO add more code to catch exception or unexpected state
if not instance:
logging.error('no instance %s load from %s', vname, vpath)
continue
if instance.is_this_vendor(host, credential):
return vname
return None
| apache-2.0 | 5,828,995,181,071,411,000 | 36.908046 | 78 | 0.605215 | false | 4.041667 | false | false | false |
cuauv/software | vision/framework/bcf/llc.py | 1 | 1364 | import numpy as np
from scipy.spatial.distance import cdist
# ========================================================================
# USAGE: [Coeff]=LLC_coding_appr(B,X,knn,lambda)
# Approximated Locality-constraint Linear Coding
#
# Inputs
# B -M x d codebook, M entries in a d-dim space
# X -N x d matrix, N data points in a d-dim space
# knn -number of nearest neighboring
# lambda -regulerization to improve condition
#
# Outputs
# Coeff -N x M matrix, each row is a code for corresponding X
#
# Jinjun Wang, march 19, 2010
# ========================================================================
def llc_coding_approx(B, X, k_nn=5, beta=1e-4):
D = cdist(X, B, 'euclidean')
N = X.shape[0]
I = np.zeros((N, k_nn), 'int32')
for i in range(N):
d = D[i, :]
idx = np.argsort(d)
I[i, :] = idx[:k_nn]
II = np.eye(k_nn)
coeffs = np.zeros((N, B.shape[0]))
for i in range(N):
idx = I[i, :]
z = B[idx, :] - np.tile(X[i, :], (k_nn, 1)) # shift ith point to origin
z = z.dot(z.transpose())
z = z + II * beta * np.trace(z) # regularization (K>D)
w = np.linalg.solve(z, np.ones((k_nn, 1)))
w = w / np.sum(w) # enforce sum(w) = 1
coeffs[i, idx] = w.ravel()
return coeffs
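# Illustrative usage sketch (not part of the original file; shapes are assumed):
# encode N=10 descriptors against an M=64 entry codebook in a d=32 dim space.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    B = rng.rand(64, 32)   # codebook, M x d
    X = rng.rand(10, 32)   # data points, N x d
    coeffs = llc_coding_approx(B, X, k_nn=5)
    print(coeffs.shape)                            # (10, 64)
    print(np.allclose(coeffs.sum(axis=1), 1.0))    # each LLC code sums to 1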
| bsd-3-clause | 9,001,279,566,035,701,000 | 35.864865 | 79 | 0.478006 | false | 3.085973 | false | false | false |
bit-bots/bitbots_misc | bitbots_live_tool_rqt/scripts/information_tab.py | 1 | 8120 | import sys
#from PyQt4 import QtGui, QtCore
from status_msg import StatusMsg
from single_field import SingleField
class InformationTab :
def __init__(self, tab_no, field, rolelabel, penaltylabel, hardwarelabel, actionlabel, hideallcheck, targetcheck, \
ballcheck, teammatecheck, opponentcheck, undefcheck, activitystateGreen, activitystateRed):
"""
:param tab_no: the number of the robot tab
:param field: the soccer field
:param rolelabel: label for the robot role
:param penaltylabel: label for the penalty
:param hardwarelabel: label for the hardware status
:param actionlabel: label for the next action
:param hideallcheck: check box to hide everything
:param targetcheck: check box to hide the target
:param ballcheck: check box to hide the ball
:param teammatecheck: check box to hide teammates
:param opponentcheck: check box to hide opponents
:param undefcheck: check box to hide undefined obstacles
:param activitystateGreen: green indicator
:param activitystateRed: red indicator
"""
#tab index [0;3]
self.index = tab_no
# decodes the integer for the corresponding role
self.roleDecoder = {0: 'IDLING', 1: 'OTHER', 2: 'STRIKER', 3: 'SUPPORTER', 4: 'DEFENDER', 5: 'GOALIE'}
# decodes the integer for the corresponding action
self.actionDecoder = {0: 'Undefinded', 1: 'Positioning', 2: 'Going to ball', 3: 'Trying to score', 4: 'Waiting'}
self.stateDecoder = {0: 'CONTROLLABLE', 1: 'FALLING', 2: 'FALLEN', 3: 'GETTING_UP', 4: 'ANIMATION_RUNNING', 5: 'STARTUP', \
6: 'SHUTDOWN', 7: 'PENALTY', 8: 'PENALTY_ANIMATION', 9: 'RECORD', 10: 'WALKING', 11: 'MOTOR_OFF', \
12: 'HCM_OFF', 13: 'HARDWARE_PROBLEM', 14: 'PICKED_UP'}
# Labels, get msg
self.rolelabel = rolelabel
self.penaltylabel = penaltylabel
self.hardwarelabel = hardwarelabel
self.actionlabel = actionlabel
# Checkboxes, default checked, but hide all unchecked
self.hideallcheck = hideallcheck
        self.hideallcheck.setChecked(False)  # hide-all starts unchecked
self.hideallcheck.stateChanged.connect(lambda: self.hideallstate())
self.targetcheck = targetcheck
self.targetcheck.setChecked(True)
self.targetcheck.stateChanged.connect(lambda: self.targetstate())
self.ballcheck = ballcheck
self.ballcheck.setChecked(True)
self.ballcheck.stateChanged.connect(lambda: self.ballstate())
self.teammatecheck = teammatecheck
self.teammatecheck.setChecked(True)
self.teammatecheck.stateChanged.connect(lambda: self.teammatestate())
self.opponentcheck = opponentcheck
self.opponentcheck.setChecked(True)
self.opponentcheck.stateChanged.connect(lambda: self.opponentstate())
self.undefcheck = undefcheck
self.undefcheck.setChecked(True)
self.undefcheck.stateChanged.connect(lambda: self.undefinedstate())
self.activitystateGreen = activitystateGreen
self.activitystateGreen.hide()
self.activitystateRed = activitystateRed
self.activitystateRed.show()
self.field = field
# Labels
def actualizePenaltylabel(self, data):
"""
updates the penalty label
:param data: a dictionary with the transmitted information
:return:
"""
if data.has_key(StatusMsg.label_penalty_rest):
self.penaltylabel.setText(str(data.get(StatusMsg.label_penalty_rest)))
def acutalizeRolelabel (self, data):
"""
updates the role label
:param data: a dictionary with the transmitted information
:return:
"""
if data.has_key(StatusMsg.label_role):
self.rolelabel.setText(self.roleDecoder.get(data.get(StatusMsg.label_role)))
def acutalizeActionlabel (self, data):
"""
updates the action label
:param data: a dictionary with the transmitted information
:return:
"""
if data.has_key(StatusMsg.label_action):
self.actionlabel.setText(self.actionDecoder.get(data.get(StatusMsg.label_action)))
def actualizeHardwarelabel (self, data):
"""
updates the hardware label
:param data: a dictionary with the transmitted information
:return:
"""
if data.has_key(StatusMsg.label_state):
self.hardwarelabel.setText(self.stateDecoder.get(data.get(StatusMsg.label_state)))
def actualizeActivitystate(self, data):
"""
:param data: a dictionary with the transmitted information
:return:
"""
if data.has_key(StatusMsg.label_penalized):
self.activitystateRed.raise_()
if data.get(StatusMsg.label_penalized) == True:
self.activitystateGreen.hide()
self.activitystateRed.show()
else:
self.activitystateGreen.show()
self.activitystateRed.hide()
def setStatusMsg(self, data):
"""
:param data: a dictionary with the transmitted information
:return:
"""
self.actualizePenaltylabel(data)
self.acutalizeRolelabel(data)
self.acutalizeActionlabel(data)
self.actualizeActivitystate(data)
self.actualizeHardwarelabel(data)
# Filters for objects on field
def hideallstate(self):
"""
hide every label on the field
:return:
"""
if self.hideallcheck.isChecked() == True:
self.field.setOwnRobotsVisibility(False, self.index)
self.field.setPathVisibility(False, self.index)
self.field.setBallVisibility(False, self.index)
self.field.setTeammateVisibility(False, self.index)
#self.field.setPathVisibility(False, self.index)
self.field.setOpponentVisibility(False, self.index)
self.field.setUndefVisibility(False, self.index)
self.ballcheck.setChecked(False)
self.teammatecheck.setChecked(False)
self.opponentcheck.setChecked(False)
self.undefcheck.setChecked(False)
self.targetcheck.setChecked(False)
else:
self.field.setOwnRobotsVisibility(True, self.index)
self.field.setPathVisibility(True, self.index)
self.field.setBallVisibility(True, self.index)
self.field.setTeammateVisibility(True, self.index)
#self.field.setPathVisibility(True, self.index)
self.field.setOpponentVisibility(True, self.index)
self.field.setUndefVisibility(True, self.index)
self.ballcheck.setChecked(True)
self.teammatecheck.setChecked(True)
self.opponentcheck.setChecked(True)
self.undefcheck.setChecked(True)
self.targetcheck.setChecked(True)
def targetstate(self):
if self.targetcheck.isChecked() == True:
self.field.setPathVisibility(True, self.index)
else:
self.field.setPathVisibility(False, self.index)
def ballstate(self):
if self.ballcheck.isChecked() == True:
self.field.setBallVisibility(True, self.index)
else:
self.field.setBallVisibility(False, self.index)
def teammatestate(self):
if self.teammatecheck.isChecked() == True:
self.field.setTeammateVisibility(True, self.index)
else:
self.field.setTeammateVisibility(False, self.index)
def opponentstate(self):
if self.opponentcheck.isChecked() == True:
self.field.setOpponentVisibility(True, self.index)
else:
self.field.setOpponentVisibility(False, self.index)
def undefinedstate(self):
if self.undefcheck.isChecked() == True:
self.field.setUndefVisibility(True, self.index)
else:
self.field.setUndefVisibility(False, self.index)
| mit | -3,320,580,296,835,911,700 | 36.767442 | 131 | 0.641626 | false | 4.041812 | false | false | false |
tgsmith61591/skutil | skutil/h2o/util.py | 1 | 13251 | from __future__ import print_function, division, absolute_import
import numpy as np
import h2o
import pandas as pd
import warnings
from collections import Counter
from pkg_resources import parse_version
from ..utils import (validate_is_pd, human_bytes, corr_plot,
load_breast_cancer_df, load_iris_df,
load_boston_df)
from .frame import _check_is_1d_frame
from .select import _validate_use
from .base import check_frame
from .fixes import rbind_all
from h2o.frame import H2OFrame
from sklearn.utils.validation import check_array
__all__ = [
'from_array',
'from_pandas',
'h2o_bincount',
'h2o_col_to_numpy',
'h2o_corr_plot',
'h2o_frame_memory_estimate',
'load_iris_h2o',
'load_boston_h2o',
'load_breast_cancer_h2o',
'reorder_h2o_frame',
'shuffle_h2o_frame'
]
def load_iris_h2o(include_tgt=True, tgt_name="Species", shuffle=False):
"""Load the iris dataset into an H2OFrame
Parameters
----------
include_tgt : bool, optional (default=True)
Whether or not to include the target
tgt_name : str, optional (default="Species")
The name of the target column.
shuffle : bool, optional (default=False)
Whether or not to shuffle the data
"""
X = from_pandas(load_iris_df(include_tgt, tgt_name, shuffle))
if include_tgt:
X[tgt_name] = X[tgt_name].asfactor()
return X
def load_breast_cancer_h2o(include_tgt=True, tgt_name="target", shuffle=False):
"""Load the breast cancer dataset into an H2OFrame
Parameters
----------
include_tgt : bool, optional (default=True)
Whether or not to include the target
tgt_name : str, optional (default="target")
The name of the target column.
shuffle : bool, optional (default=False)
Whether or not to shuffle the data
"""
X = from_pandas(load_breast_cancer_df(include_tgt, tgt_name, shuffle))
if include_tgt:
X[tgt_name] = X[tgt_name].asfactor()
return X
def load_boston_h2o(include_tgt=True, tgt_name="target", shuffle=False):
"""Load the boston housing dataset into an H2OFrame
Parameters
----------
include_tgt : bool, optional (default=True)
Whether or not to include the target
tgt_name : str, optional (default="target")
The name of the target column.
shuffle : bool, optional (default=False)
Whether or not to shuffle the data
"""
X = from_pandas(load_boston_df(include_tgt, tgt_name, shuffle))
return X
def h2o_col_to_numpy(column):
"""Return a 1d numpy array from a
single H2OFrame column.
Parameters
----------
column : H2OFrame column, shape=(n_samples, 1)
A column from an H2OFrame
Returns
-------
np.ndarray, shape=(n_samples,)
"""
x = _check_is_1d_frame(column)
_1d = x[x.columns[0]].as_data_frame(use_pandas=True)
return _1d[_1d.columns[0]].values
def _unq_vals_col(column):
"""Get the unique values and column name
from a column.
Returns
-------
str, np.ndarray : tuple
(c1_nm, unq)
"""
unq = column.unique().as_data_frame(use_pandas=True)
c1_nm = unq.columns[0]
unq = unq[unq.columns[0]].sort_values().reset_index()
return c1_nm, unq
def h2o_bincount(bins, weights=None, minlength=None):
"""Given a 1d column of non-negative ints, ``bins``, return
a np.ndarray of positional counts of each int.
Parameters
----------
bins : H2OFrame
The values
weights : list or H2OFrame, optional (default=None)
The weights with which to weight the output
minlength : int, optional (default=None)
The min length of the output array
"""
bins = _check_is_1d_frame(bins)
_, unq = _unq_vals_col(bins)
    # ensure all values are non-negative
unq_arr = unq[_].values
if any(unq_arr < 0):
raise ValueError('values must be positive')
# make sure they're all ints
if np.abs((unq_arr.astype(np.int) - unq_arr).sum()) > 0:
raise ValueError('values must be ints')
# adjust minlength
if minlength is None:
minlength = 1
elif minlength < 0:
raise ValueError('minlength must be positive')
# create our output array
all_vals = h2o_col_to_numpy(bins)
output = np.zeros(np.maximum(minlength, unq_arr.max() + 1))
# check weights
if weights is not None:
if isinstance(weights, (list, tuple)):
weights = np.asarray(weights)
elif isinstance(weights, H2OFrame):
weights = h2o_col_to_numpy(weights)
if weights.shape[0] != all_vals.shape[0]:
raise ValueError('dim mismatch in weights and bins')
else:
weights = np.ones(all_vals.shape[0])
# update our bins
for val in unq_arr:
mask = all_vals == val
array_ones = np.ones(mask.sum())
weight_vals = weights[mask]
output[val] = np.dot(array_ones, weight_vals)
return output
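# Illustrative sketch (assumes a running H2O cluster, e.g. via h2o.init()):
#   vals = from_array(np.array([[0.], [1.], [1.], [3.]]), column_names=['x'])
#   h2o_bincount(vals)                         # -> array([1., 2., 0., 1.])
#   h2o_bincount(vals, weights=[2, 1, 1, 5])   # -> array([2., 2., 0., 5.])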
def from_pandas(X):
"""A simple wrapper for H2OFrame.from_python. This takes
a pandas dataframe and returns an H2OFrame with all the
default args (generally enough) plus named columns.
Parameters
----------
X : pd.DataFrame
The dataframe to convert.
Returns
-------
H2OFrame
"""
pd, _ = validate_is_pd(X, None)
# older version of h2o are super funky with this
if parse_version(h2o.__version__) < parse_version('3.10.0.7'):
h = 1
else:
h = 0
# if h2o hasn't started, we'll let this fail through
return H2OFrame.from_python(X, header=h, column_names=X.columns.tolist())
def from_array(X, column_names=None):
"""A simple wrapper for H2OFrame.from_python. This takes a
numpy array (or 2d array) and returns an H2OFrame with all
the default args.
Parameters
----------
X : ndarray
The array to convert.
column_names : list, tuple (default=None)
the names to use for your columns
Returns
-------
H2OFrame
"""
X = check_array(X, force_all_finite=False)
return from_pandas(pd.DataFrame.from_records(data=X, columns=column_names))
def h2o_corr_plot(X, plot_type='cor', cmap='Blues_d', n_levels=5,
figsize=(11, 9), cmap_a=220, cmap_b=10, vmax=0.3,
xticklabels=5, yticklabels=5, linewidths=0.5,
cbar_kws={'shrink': 0.5}, use='complete.obs',
na_warn=True, na_rm=False):
"""Create a simple correlation plot given a dataframe.
Note that this requires all datatypes to be numeric and finite!
Parameters
----------
X : H2OFrame, shape=(n_samples, n_features)
The H2OFrame
plot_type : str, optional (default='cor')
The type of plot, one of ('cor', 'kde', 'pair')
cmap : str, optional (default='Blues_d')
The color to use for the kernel density estimate plot
if plot_type == 'kde'
n_levels : int, optional (default=5)
The number of levels to use for the kde plot
if plot_type == 'kde'
figsize : tuple (int), optional (default=(11,9))
The size of the image
cmap_a : int, optional (default=220)
The colormap start point
cmap_b : int, optional (default=10)
The colormap end point
vmax : float, optional (default=0.3)
Arg for seaborn heatmap
xticklabels : int, optional (default=5)
The spacing for X ticks
yticklabels : int, optional (default=5)
The spacing for Y ticks
linewidths : float, optional (default=0.5)
The width of the lines
cbar_kws : dict, optional
Any KWs to pass to seaborn's heatmap when plot_type = 'cor'
use : str, optional (default='complete.obs')
The "use" to compute the correlation matrix
na_warn : bool, optional (default=True)
Whether to warn in the presence of NA values
na_rm : bool, optional (default=False)
Whether to remove NAs
"""
X = check_frame(X, copy=False)
corr = None
if plot_type == 'cor':
use = _validate_use(X, use, na_warn)
cols = [str(u) for u in X.columns]
X = X.cor(use=use, na_rm=na_rm).as_data_frame(use_pandas=True)
X.columns = cols # set the cols to the same names
X.index = cols
corr = 'precomputed'
else:
# WARNING! This pulls everything into memory...
X = X.as_data_frame(use_pandas=True)
corr_plot(X, plot_type=plot_type, cmap=cmap, n_levels=n_levels,
figsize=figsize, cmap_a=cmap_a, cmap_b=cmap_b,
vmax=vmax, xticklabels=xticklabels, corr=corr,
yticklabels=yticklabels, linewidths=linewidths,
cbar_kws=cbar_kws)
def h2o_frame_memory_estimate(X, bit_est=32, unit='MB'):
"""We estimate the memory footprint of an H2OFrame
to determine, possibly, whether it's capable of being
held in memory or not.
Parameters
----------
X : H2OFrame
The H2OFrame in question
bit_est : int, optional (default=32)
The estimated bit-size of each cell. The default
assumes each cell is a signed 32-bit float
unit : str, optional (default='MB')
The units to report. One of ('MB', 'KB', 'GB', 'TB')
Returns
-------
mb : str
The estimated number of UNIT held in the frame
"""
X = check_frame(X, copy=False)
n_samples, n_features = X.shape
n_bits = (n_samples * n_features) * bit_est
n_bytes = n_bits // 8
return human_bytes(n_bytes, unit)
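# Worked example of the estimate above: a 1,000,000-row x 10-column frame of
# 32-bit cells is (1e6 * 10 * 32) / 8 = 40,000,000 bytes before human_bytes
# converts the byte count into the requested unit.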
def _gen_optimized_chunks(idcs):
"""Given the list of indices, create more efficient chunks to minimize
the number of rbind operations required for the H2OFrame ExprNode cache.
"""
idcs = sorted(idcs)
counter = Counter(idcs)
counts = counter.most_common() # order desc
# the first index is the number of chunks we'll need to create.
n_chunks = counts[0][1]
chunks = [[] for _ in range(n_chunks)] # gen the number of chunks we'll need
# 1. populate the chunks each with their first idx (the most common)
# 2. pop from the counter
# 3. re-generate the most_common(), repeat
while counts:
val, n_iter = counts[0] # the one at the head of the list is the most common
for i in range(n_iter):
chunks[i].append(val)
counts.pop(0) # pop out the first idx...
# sort them
return [sorted(chunk) for chunk in chunks]
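# Worked example of the chunking above (derived from the logic, not from docs):
#   _gen_optimized_chunks([0, 1, 1, 2, 2, 2]) -> [[0, 1, 2], [1, 2], [2]]
# i.e. one chunk per repetition of the most frequent index, each chunk holding
# strictly increasing, duplicate-free row indices that can be sliced in one go.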
def reorder_h2o_frame(X, idcs, from_chunks=False):
"""Currently, H2O does not allow us to reorder
frames. This is a hack to rbind rows together in the
order prescribed.
Parameters
----------
X : H2OFrame
The H2OFrame to reorder
idcs : iterable
The order of the H2OFrame rows to be returned.
from_chunks : bool, optional (default=False)
Whether the elements in ``idcs`` are optimized chunks
generated by ``_gen_optimized_chunks``.
Returns
-------
new_frame : H2OFrame
The reordered H2OFrame
"""
# hack... slow but functional
X = check_frame(X, copy=False) # we're rbinding. no need to copy
# to prevent rbinding rows over, and over, and over
# create chunks. Rbind chunks that are progressively increasing.
# once we hit an index that decreases, rbind, and then start the next chunk
last_index = np.inf
chunks = [] # all of the chunks
chunk = [] # the current chunk being built
for i in idcs:
# if it's a chunk from balancer:
if from_chunks: # probably a list of indices
chunks.append(X[i, :])
# otherwise chunks have not been computed
else:
# while the indices increase adjacently
if i < last_index:
last_index = i
chunk.append(i)
# otherwise, they are no longer increasing
else:
# if a chunk exists
if chunk: # there should ALWAYS be a chunk
rows = X[chunk, :]
else:
rows = X[i, :]
# append the chunk and reset the list
chunks.append(rows)
chunk = []
last_index = np.inf
# print([type(c) for c in chunks]) # couldn't figure out an issue for a while...
return rbind_all(*chunks)
def shuffle_h2o_frame(X):
"""Currently, H2O does not allow us to shuffle
frames. This is a hack to rbind rows together in the
order prescribed.
Parameters
----------
X : H2OFrame
The H2OFrame to reorder
Returns
-------
shuf : H2OFrame
The shuffled H2OFrame
"""
warnings.warn('Shuffling H2O frames will eventually be deprecated, as H2O '
'does not allow re-ordering of frames by row. The current work-around '
'(rbinding the rows) is known to cause issues in the H2O ExprNode '
'cache for very large frames.', DeprecationWarning)
X = check_frame(X, copy=False)
idcs = np.random.permutation(np.arange(X.shape[0]))
shuf = reorder_h2o_frame(X, idcs) # do not generate optimized chunks here...
return shuf
| bsd-3-clause | 3,142,361,123,218,995,700 | 26.838235 | 89 | 0.609011 | false | 3.619503 | false | false | false |
DavidNorman/tensorflow | tensorflow/python/training/tracking/layer_utils.py | 9 | 11165 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities related to layer/model functionality."""
# TODO(b/110718070): Move these functions back to tensorflow/python/keras/utils
# once __init__ files no longer require all of tf.keras to be imported together.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import weakref
from tensorflow.python.util import object_identity
try:
# typing module is only used for comment type annotations.
import typing # pylint: disable=g-import-not-at-top, unused-import
except ImportError:
pass
def is_layer(obj):
"""Implicit check for Layer-like objects."""
# TODO(b/110718070): Replace with isinstance(obj, base_layer.Layer).
return hasattr(obj, "_is_layer") and not isinstance(obj, type)
def has_weights(obj):
"""Implicit check for Layer-like objects."""
# TODO(b/110718070): Replace with isinstance(obj, base_layer.Layer).
has_weight = (hasattr(type(obj), "trainable_weights")
and hasattr(type(obj), "non_trainable_weights"))
return has_weight and not isinstance(obj, type)
def cache_recursive_attribute(key):
"""Decorator to cache Layer properties which recursively depend on sub-layers.
A number of attributes in Keras Layers take the form:
```
@property
def thing(self):
return self._thing or any(layer.thing for layer in self.layers)
```
This means that checking these properties (e.g. dynamic, stateful, etc) must
  traverse the entire graph of layers to determine whether any descendant has
changed its state. This decorator adds a mechanism for Layers and trackable
data structures to broadcast mutations (including the addition or deletion
of layers) and allows the top level layer to safely cache results. In general,
if computing an attribute triggers a depth first search it is a good candidate
for this caching mechanism.
The architecture is optimized for safety and correctness rather than absolute
optimality. This manifests in two ways:
1) Parents are never removed. It is possible for layer A to depend on layer
B but subsequently remove that dependency. In that case, layer B will
continue to broadcast its mutations to layer A until either A or B is
deleted. However because the only effect is to invalidate a cache this
does not affect correctness. (And robustly removing dependencies is
difficult and error prone.)
2) Layers aggressively invalidate their caches when there is any ambiguity
of whether or not it is necessary. For instance, consider the following:
```
class MyLayer(tf.keras.layers.Layer):
def __init__(self):
super(MyLayer, self).__init__()
sub_layer = tf.keras.layers.Dense(1)
self.sub_layers = [
sub_layer # This will be picked up, converted to a ListWrapper,
# and added to self._layers
]
# Include the layer twice.
self.sub_layers.append(sub_layer)
# Remove one copy, but one copy remains.
self.sub_layers.pop()
```
In the example layer above, the set of tracked layers actually doesn't
change; however to know that in the general case the Layer needs
significant machinery to reason about what, if anything, has changed.
By invalidating on every mutation we don't need to concern ourselves
with the many types of mutations (append, pop, in-place replacement)
and their specific semantics.
Because mutations to layers are expected to be infrequent, this very
conservative approach captures the vast majority of the performance gains from
caching recursive properties while still remaining quite lightweight and easy
to reason about.
`tracking.cached_per_instance` provides a more detailed performance analysis
of the WeakKeyDictionary cache pattern.
Args:
key: A string indicating which field is being cached. While not strictly
necessary (since it could be obtained from f.__name__), it forces
deliberate behavior when caching an attribute.
Returns:
A caching decorater specialized to `key`.
"""
cache = weakref.WeakKeyDictionary()
def outer(f):
"""Attribute cache which has been specialized."""
@functools.wraps(f)
def wrapped(self):
"""Cache aware version of `f`."""
# Sentinels are unique per Layer/Trackable, but can be hashed. (Unlike
# some trackable data structures.) Consequently it makes sense to use the
# sentinel as a cache key rather than `self`.
sentinel = getattr(self, "_attribute_sentinel") # type: AttributeSentinel
if not sentinel.get(key) or sentinel not in cache:
cache[sentinel] = f(self)
sentinel.mark_cached(key)
output = cache[sentinel]
return output
return wrapped
return outer
def invalidate_recursive_cache(key):
"""Convenience decorator to invalidate the cache when setting attributes."""
def outer(f):
@functools.wraps(f)
def wrapped(self, value):
sentinel = getattr(self, "_attribute_sentinel") # type: AttributeSentinel
sentinel.invalidate(key)
return f(self, value)
return wrapped
return outer
class MutationSentinel(object):
"""Container for tracking whether a property is in a cached state."""
_in_cached_state = False
def mark_as(self, value): # type: (MutationSentinel, bool) -> bool
may_affect_upstream = (value != self._in_cached_state)
self._in_cached_state = value
return may_affect_upstream
@property
def in_cached_state(self):
return self._in_cached_state
class AttributeSentinel(object):
"""Container for managing attribute cache state within a Layer.
The cache can be invalidated either on an individual basis (for instance when
an attribute is mutated) or a layer-wide basis (such as when a new dependency
is added).
"""
def __init__(self, always_propagate=False):
self._parents = weakref.WeakSet()
self.attributes = collections.defaultdict(MutationSentinel)
# The trackable data structure containers are simple pass throughs. They
# don't know or care about particular attributes. As a result, they will
# consider themselves to be in a cached state, so it's up to the Layer
# which contains them to terminate propagation.
self.always_propagate = always_propagate
def __repr__(self):
return "{}\n {}".format(
super(AttributeSentinel, self).__repr__(),
{k: v.in_cached_state for k, v in self.attributes.items()})
def add_parent(self, node):
# type: (AttributeSentinel, AttributeSentinel) -> None
# Properly tracking removal is quite challenging; however since this is only
# used to invalidate a cache it's alright to be overly conservative. We need
# to invalidate the cache of `node` (since it has implicitly gained a child)
# but we don't need to invalidate self since attributes should not depend on
# parent Layers.
self._parents.add(node)
node.invalidate_all()
def get(self, key):
# type: (AttributeSentinel, str) -> bool
return self.attributes[key].in_cached_state
def _set(self, key, value):
# type: (AttributeSentinel, str, bool) -> None
may_affect_upstream = self.attributes[key].mark_as(value)
if may_affect_upstream or self.always_propagate:
for node in self._parents: # type: AttributeSentinel
node.invalidate(key)
def mark_cached(self, key):
# type: (AttributeSentinel, str) -> None
self._set(key, True)
def invalidate(self, key):
# type: (AttributeSentinel, str) -> None
self._set(key, False)
def invalidate_all(self):
# Parents may have different keys than their children, so we locally
# invalidate but use the `invalidate_all` method of parents.
for key in self.attributes.keys():
self.attributes[key].mark_as(False)
for node in self._parents:
node.invalidate_all()
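# Illustrative sketch of the propagation behaviour (added for clarity; not
# part of the original module). A child sentinel registers its parent; marking
# the child's key as cached or invalidating it notifies the parent, so caches
# higher up are conservatively dropped. Names are hypothetical.
#
#   parent, child = AttributeSentinel(), AttributeSentinel()
#   child.add_parent(parent)          # also invalidates `parent` entirely
#   child.mark_cached("stateful")     # propagation invalidates parent's entry
#   parent.mark_cached("stateful")
#   assert parent.get("stateful") and child.get("stateful")
#   child.invalidate("stateful")      # a mutation below drops the parent cache
#   assert not parent.get("stateful") and not child.get("stateful")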
def filter_empty_layer_containers(layer_list):
"""Filter out empty Layer-like containers and uniquify."""
# TODO(b/130381733): Make this an attribute in base_layer.Layer.
existing = object_identity.ObjectIdentitySet()
to_visit = layer_list[::-1]
while to_visit:
obj = to_visit.pop()
if obj in existing:
continue
existing.add(obj)
if is_layer(obj):
yield obj
else:
sub_layers = getattr(obj, "layers", None) or []
# Trackable data structures will not show up in ".layers" lists, but
# the layers they contain will.
to_visit.extend(sub_layers[::-1])
def gather_trainable_weights(trainable, sub_layers, extra_variables):
"""Lists the trainable weights for an object with sub-layers.
Args:
trainable: Whether the object collecting the variables is trainable.
sub_layers: A flat list of Layer objects owned by this object, to collect
variables from.
extra_variables: Any extra variables to include. Their `.trainable` property
is used to categorize them.
Returns:
A list of collected trainable weights/variables.
"""
if not trainable:
return []
weights = []
for layer in sub_layers:
weights += layer.trainable_weights
trainable_extra_variables = [
v for v in extra_variables if v.trainable]
return weights + trainable_extra_variables
def gather_non_trainable_weights(trainable, sub_layers, extra_variables):
"""Lists the non-trainable weights for an object with sub-layers.
Args:
trainable: Whether the object collecting the variables is trainable.
sub_layers: A flat list of Layer objects owned by this object, to collect
variables from.
extra_variables: Any extra variables to include. Their `.trainable` property
is used to categorize them.
Returns:
A list of collected non-trainable weights/variables.
"""
trainable_extra_variables = []
non_trainable_extra_variables = []
for v in extra_variables:
if v.trainable:
trainable_extra_variables.append(v)
else:
non_trainable_extra_variables.append(v)
weights = []
for layer in sub_layers:
weights += layer.non_trainable_weights
if not trainable:
trainable_weights = []
for layer in sub_layers:
trainable_weights += layer.trainable_weights
return (trainable_weights + trainable_extra_variables
+ weights + non_trainable_extra_variables)
return weights + non_trainable_extra_variables
| apache-2.0 | 7,913,808,457,325,581,000 | 35.486928 | 80 | 0.699239 | false | 4.195791 | false | false | false |
DouglasLeeTucker/DECam_PGCM | bin/rawdata_clean_relevant_gaiadr2_data.py | 1 | 8174 | #!/usr/bin/env python
"""
rawdata_clean_relevant_gaiadr2_data.py
Example:
rawdata_clean_relevant_gaiadr2_data.py --help
rawdata_clean_relevant_gaiadr2_data.py --inputFile gaiadr2_new_rawdata_rawdata.csv --outputFile gaiadr2_new_y2a1_rawdata.u.csv.tmp --verbose 2
"""
##################################
def main():
import argparse
import time
"""Create command line arguments"""
parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('--inputFile', help='name of the input CSV file', default='input.csv')
parser.add_argument('--outputFile', help='name of the output CSV file', default='output.csv')
parser.add_argument('--verbose', help='verbosity level of output to screen (0,1,2,...)', default=0, type=int)
args = parser.parse_args()
if args.verbose > 0: print args
status = clean_relevant_gaiadr2_data(args)
return status
##################################
# clean_relevant_gaiadr2_data
#
def clean_relevant_gaiadr2_data(args):
import numpy as np
import os
import sys
import datetime
import fitsio
import pandas as pd
if args.verbose>0:
print
print '* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *'
print 'clean_relevant_gaiadr2_data'
print '* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *'
print
inputFile = args.inputFile
outputFile = args.outputFile
# Read selected columns from inputFile...
columns = ['RA_WRAP','RA','DEC',
'PHOT_G_MEAN_MAG','PHOT_G_MEAN_FLUX_OVER_ERROR',
'PHOT_BP_MEAN_MAG','PHOT_BP_MEAN_FLUX_OVER_ERROR',
'PHOT_RP_MEAN_MAG','PHOT_RP_MEAN_FLUX_OVER_ERROR',
'BP_RP','BP_G','G_RP','PHOT_BP_RP_EXCESS_FACTOR']
print datetime.datetime.now()
print """Reading in selected columns from %s...""" % (inputFile)
df = pd.read_csv(inputFile, usecols=columns)
print datetime.datetime.now()
# Includes masks for mag, magerr, color, main stellar locus outliers,
# and BP_RP photometric excess...
mask = ( (1.086/df.PHOT_G_MEAN_FLUX_OVER_ERROR < 0.3) &
(1.086/df.PHOT_BP_MEAN_FLUX_OVER_ERROR < 0.3) &
(1.086/df.PHOT_RP_MEAN_FLUX_OVER_ERROR < 0.3) &
(df.PHOT_G_MEAN_MAG < 19.0) &
(df.BP_G > 0.2) & (df.BP_G < 1.6) &
(np.abs(df.G_RP - 0.45*(df.BP_RP + 0.2)) < 0.2) &
(df.PHOT_BP_RP_EXCESS_FACTOR > (1.0 + 0.015*df.BP_RP*df.BP_RP)) &
(df.PHOT_BP_RP_EXCESS_FACTOR < (1.3 + 0.060*df.BP_RP*df.BP_RP)) )
# Steve Kent's Gaia DR2 -> DES transformations, of the format:
# des_mag = Gaia_G + intercept + slope*( (Gaia_BP-Gaia_G) - color0 ),
# one relation for (Gaia_BP-Gaia_G) < color0 [blue],
# and another for (Gaia_BP-Gaia_G) > color0 [red].
#
# See S Kent's e-mail from 31 August 2018...
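    # Illustrative worked example (added for clarity; not part of the original
    # script). Using the skent1 g-band coefficients defined below
    # (color0 = 0.899, intercept = 1.339, blue slope = 1.682), a star with
    # Gaia G = 15.000 and BP-G = 0.700 lies on the blue side (0.700 <= 0.899):
    #   g_DES = 15.000 + 1.339 + 1.682*(0.700 - 0.899) ~= 16.004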
skent1 = {}
skent1['g.color0'] = 0.899
skent1['g.intercept'] = 1.339
skent1['g.blue.slope'] = 1.682
skent1['g.red.slope'] = 1.015
skent1['r.color0'] = 0.78
skent1['r.intercept'] = -0.124
skent1['r.blue.slope'] = -0.174
skent1['r.red.slope'] = 0.767
skent1['i.color0'] = 0.90
skent1['i.intercept'] = -0.674
skent1['i.blue.slope'] = -0.879
skent1['i.red.slope'] = -0.437
skent1['z.color0'] = 1.12
skent1['z.intercept'] = -1.216
skent1['z.blue.slope'] = -1.247
skent1['z.red.slope'] = -0.706
skent1['Y.color0'] = 0.91
skent1['Y.intercept'] = -1.052
skent1['Y.blue.slope'] = -1.441
skent1['Y.red.slope'] = -1.028
skent2 = {}
skent2['g.color0'] = 0.899
skent2['g.intercept'] = 1.349
skent2['g.blue.slope'] = 1.702
skent2['g.red.slope'] = 0.907
skent2['r.color0'] = 0.78
skent2['r.intercept'] = -0.116
skent2['r.blue.slope'] = -0.151
skent2['r.red.slope'] = 0.747
skent2['i.color0'] = 0.90
skent2['i.intercept'] = -0.691
skent2['i.blue.slope'] = -0.925
skent2['i.red.slope'] = -0.410
skent2['z.color0'] = 1.12
skent2['z.intercept'] = -1.217
skent2['z.blue.slope'] = -1.282
skent2['z.red.slope'] = -0.637
skent2['Y.color0'] = 0.91
skent2['Y.intercept'] = -1.055
skent2['Y.blue.slope'] = -1.514
skent2['Y.red.slope'] = -0.992
skent3 = {}
skent3['g.color0'] = 0.899
skent3['g.intercept'] = 1.306
skent3['g.blue.slope'] = 1.634
skent3['g.red.slope'] = 0.939
skent3['r.color0'] = 0.78
skent3['r.intercept'] = -0.136
skent3['r.blue.slope'] = -0.179
skent3['r.red.slope'] = 0.747
skent3['i.color0'] = 0.90
skent3['i.intercept'] = -0.678
skent3['i.blue.slope'] = -0.905
skent3['i.red.slope'] = -0.444
skent3['z.color0'] = 1.12
skent3['z.intercept'] = -1.193
skent3['z.blue.slope'] = -1.256
skent3['z.red.slope'] = -0.873
skent3['Y.color0'] = 0.91
skent3['Y.intercept'] = -1.034
skent3['Y.blue.slope'] = -1.464
skent3['Y.red.slope'] = -1.094
for band in ['g', 'r', 'i', 'z', 'Y']:
# S Kent #1:
desMagColName1 = """%sMAG_DES_1""" % (band.upper())
color0 = """%s.color0""" % (band)
intercept = """%s.intercept""" % (band)
blue_slope = """%s.blue.slope""" % (band)
red_slope = """%s.red.slope""" % (band)
df.loc[:,desMagColName1] = -9999.
blueMask = (mask & (df.BP_G <= skent1[color0]))
redMask = (mask & (df.BP_G > skent1[color0]))
df.loc[blueMask,desMagColName1] = df.loc[blueMask,'PHOT_G_MEAN_MAG'] + \
+ skent1[intercept] + skent1[blue_slope]*(df.loc[blueMask,'BP_G'] - skent1[color0])
df.loc[redMask,desMagColName1] = df.loc[redMask,'PHOT_G_MEAN_MAG'] + \
+ skent1[intercept] + skent1[red_slope]*(df.loc[redMask,'BP_G'] - skent1[color0])
# S Kent #2:
desMagColName2 = """%sMAG_DES_2""" % (band.upper())
color0 = """%s.color0""" % (band)
intercept = """%s.intercept""" % (band)
blue_slope = """%s.blue.slope""" % (band)
red_slope = """%s.red.slope""" % (band)
df.loc[:,desMagColName2] = -9999.
blueMask = (mask & (df.BP_G <= skent2[color0]))
redMask = (mask & (df.BP_G > skent2[color0]))
df.loc[blueMask,desMagColName2] = df.loc[blueMask,'PHOT_G_MEAN_MAG'] + \
                                          + skent2[intercept] + skent2[blue_slope]*(df.loc[blueMask,'BP_G'] - skent2[color0])
df.loc[redMask,desMagColName2] = df.loc[redMask,'PHOT_G_MEAN_MAG'] + \
                                         + skent2[intercept] + skent2[red_slope]*(df.loc[redMask,'BP_G'] - skent2[color0])
# S Kent #3:
desMagColName3 = """%sMAG_DES_3""" % (band.upper())
color0 = """%s.color0""" % (band)
intercept = """%s.intercept""" % (band)
blue_slope = """%s.blue.slope""" % (band)
red_slope = """%s.red.slope""" % (band)
df.loc[:,desMagColName3] = -9999.
blueMask = (mask & (df.BP_G <= skent3[color0]))
redMask = (mask & (df.BP_G > skent3[color0]))
df.loc[blueMask,desMagColName3] = df.loc[blueMask,'PHOT_G_MEAN_MAG'] + \
                                          + skent3[intercept] + skent3[blue_slope]*(df.loc[blueMask,'BP_G'] - skent3[color0])
df.loc[redMask,desMagColName3] = df.loc[redMask,'PHOT_G_MEAN_MAG'] + \
                                         + skent3[intercept] + skent3[red_slope]*(df.loc[redMask,'BP_G'] - skent3[color0])
# S Kent average...
desMagColName = """%sMAG_DES""" % (band.upper())
df.loc[:,desMagColName] = ( df.loc[:,desMagColName1] + \
df.loc[:,desMagColName2] + \
df.loc[:,desMagColName3] ) / 3.
# Output results...
    outcolumns = columns + ['GMAG_DES','RMAG_DES','IMAG_DES','ZMAG_DES','YMAG_DES']
df.to_csv(outputFile, columns=outcolumns, index=False, float_format='%.6f')
return 0
##################################
if __name__ == "__main__":
main()
##################################
| gpl-3.0 | -8,855,762,044,883,070,000 | 31.436508 | 146 | 0.535111 | false | 2.708416 | false | false | false |
dlf412/mysql-cdc-redis | cdc_config.py | 1 | 1258 | #!/usr/bin/env python
# encoding: utf-8
# redis for saving binlog file and position
# please reverse the db + 1 for saving mysql changed data
redis_url = "redis://127.0.0.1/0"
cache_url = "redis://127.0.0.1/1"
# mysql server id
server_id = 1
# mysql connection setting
mysql_settings = {'host': '192.168.1.34',
'port': 3306,
'user': 'mediawise',
'passwd': '123'}
# watch databases setting
# it can be set to None or a tuple.
# Watch all databases if set None
# value format: None or database tuple
schemas = None
# watch tables setting, similar to the databases setting, for tables with a primary key
# value format: None or table tuple
#tables = ("task",)
tables = None
# please set a unique key for tables without a primary key
# value format:
# {} or {"table_name": fields tuple, ....}
tables_without_primary_key = {"db_test.task_test": ("uuid", )}
# Reading the binlog stream is blocking by default (True)
# If set to False, the cdc process will exit once it reaches the end of the binlog
blocking = True
# watch event setting
events = ["insert", "update", "delete"]
# turn off dumping trigger if set to 0
cache_max_rows = 2000000
dump_command = "python dump2csv.py -c dump.conf"
log_level = "INFO"
binlog_max_latency = 60000
| mit | -1,052,136,330,095,489,400 | 25.208333 | 69 | 0.664547 | false | 3.372654 | false | false | false |
dimara/synnefo | snf-pithos-backend/pithos/backends/lib/sqlite/permissions.py | 10 | 8486 | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from xfeatures import XFeatures
from groups import Groups
from public import Public
from node import Node
from collections import defaultdict
READ = 0
WRITE = 1
class Permissions(XFeatures, Groups, Public, Node):
def __init__(self, **params):
XFeatures.__init__(self, **params)
Groups.__init__(self, **params)
Public.__init__(self, **params)
Node.__init__(self, **params)
def access_grant(self, path, access, members=()):
"""Grant members with access to path.
Members can also be '*' (all),
or some group specified as 'owner:group'."""
if not members:
return
feature = self.xfeature_create(path)
self.feature_setmany(feature, access, members)
def access_set(self, path, permissions):
"""Set permissions for path. The permissions dict
maps 'read', 'write' keys to member lists."""
r = permissions.get('read', [])
w = permissions.get('write', [])
if not r and not w:
self.xfeature_destroy(path)
return
feature = self.xfeature_create(path)
self.feature_clear(feature, READ)
self.feature_clear(feature, WRITE)
if r:
self.feature_setmany(feature, READ, r)
if w:
self.feature_setmany(feature, WRITE, w)
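    # Illustrative usage sketch (added for clarity; not part of the original
    # module). The path, user and group names below are hypothetical:
    #
    #   permissions.access_set('account/container/object', {
    #       'read': ['*'],                     # readable by everyone
    #       'write': ['alice', 'bob:devs']})   # a user and an owner:group entry
    #   permissions.access_check('account/container/object', WRITE, 'alice')
    #   # -> True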
def access_get_for_bulk(self, perms):
"""Get permissions for paths."""
allowed = None
d = defaultdict(list)
for value, feature_id, key in perms:
d[key].append(value)
permissions = d
if READ in permissions:
allowed = 0
permissions['read'] = permissions[READ]
del(permissions[READ])
if WRITE in permissions:
allowed = 1
permissions['write'] = permissions[WRITE]
del(permissions[WRITE])
return (permissions, allowed)
def access_get(self, path):
"""Get permissions for path."""
feature = self.xfeature_get(path)
if not feature:
return {}
permissions = self.feature_dict(feature)
if READ in permissions:
permissions['read'] = permissions[READ]
del(permissions[READ])
if WRITE in permissions:
permissions['write'] = permissions[WRITE]
del(permissions[WRITE])
return permissions
def access_members(self, path):
feature = self.xfeature_get(path)
if not feature:
return []
permissions = self.feature_dict(feature)
members = set()
members.update(permissions.get(READ, []))
members.update(permissions.get(WRITE, []))
for m in set(members):
parts = m.split(':', 1)
if len(parts) != 2:
continue
user, group = parts
members.remove(m)
members.update(self.group_members(user, group))
return members
def access_clear(self, path):
"""Revoke access to path (both permissions and public)."""
self.xfeature_destroy(path)
self.public_unset(path)
def access_clear_bulk(self, paths):
"""Revoke access to path (both permissions and public)."""
self.xfeature_destroy_bulk(paths)
self.public_unset_bulk(paths)
def access_check(self, path, access, member):
"""Return true if the member has this access to the path."""
feature = self.xfeature_get(path)
if not feature:
return False
members = self.feature_get(feature, access)
if member in members or '*' in members:
return True
for owner, group in self.group_parents(member):
if owner + ':' + group in members:
return True
return False
def access_check_bulk(self, paths, member):
rows = None
q = ("select x.path, xvals.value, xvals.feature_id, xvals.key "
"from xfeaturevals xvals join xfeatures x "
"on xvals.feature_id = x.feature_id "
"where x.path in (%s)") % ','.join('?' for _ in paths)
self.execute(q, paths)
rows = self.fetchall()
if rows:
access_check_paths = {}
for path, value, feature_id, key in rows:
try:
access_check_paths[path].append((value, feature_id, key))
except KeyError:
access_check_paths[path] = [(value, feature_id, key)]
return access_check_paths
return None
def access_inherit(self, path):
"""Return the paths influencing the access for path."""
# r = self.xfeature_inherit(path)
# if not r:
# return []
# # Compute valid.
# return [x[0] for x in r if x[0] in valid]
# Only keep path components.
parts = path.rstrip('/').split('/')
valid = []
for i in range(1, len(parts)):
subp = '/'.join(parts[:i + 1])
valid.append(subp)
if subp != path:
valid.append(subp + '/')
return [x for x in valid if self.xfeature_get(x)]
def access_inherit_bulk(self, paths):
"""Return the paths influencing the access for paths."""
# Only keep path components.
valid = []
for path in paths:
parts = path.rstrip('/').split('/')
for i in range(1, len(parts)):
subp = '/'.join(parts[:i + 1])
valid.append(subp)
if subp != path:
valid.append(subp + '/')
valid = self.xfeature_get_bulk(valid)
return [x[1] for x in valid]
def access_list_paths(self, member, prefix=None, include_owned=False,
include_containers=True):
"""Return the list of paths granted to member.
Keyword arguments:
prefix -- return only paths starting with prefix (default None)
include_owned -- return also paths owned by member (default False)
include_containers -- return also container paths owned by member
(default True)
"""
q = ("select distinct path from xfeatures inner join "
" (select distinct feature_id, key from xfeaturevals inner join "
" (select owner || ':' || name as value from groups "
" where member = ? union select ? union select '*') "
" using (value)) "
"using (feature_id)")
p = (member, member)
if prefix:
q += " where "
paths = self.access_inherit(prefix) or [prefix]
q += ' or '.join("path like ? escape '\\'" for _ in paths)
p += tuple(self.escape_like(path) + '%' for path in paths)
self.execute(q, p)
l = [r[0] for r in self.fetchall()]
if include_owned:
node = self.node_lookup(member)
select_containers = "select node from nodes where parent = ? "
q = ("select path from nodes where parent in (%s) " %
select_containers)
args = [node]
if include_containers:
q += ("or node in (%s)" % select_containers)
args += [node]
self.execute(q, args)
l += [r[0] for r in self.fetchall() if r[0] not in l]
return l
def access_list_shared(self, prefix=''):
"""Return the list of shared paths."""
q = "select path from xfeatures where "
paths = self.access_inherit(prefix) or [prefix]
q += ' or '.join("path like ? escape '\\'" for _ in paths)
p = tuple(self.escape_like(path) + '%' for path in paths)
self.execute(q, p)
return [r[0] for r in self.fetchall()]
| gpl-3.0 | 8,818,997,615,140,552,000 | 34.655462 | 79 | 0.56057 | false | 4.223992 | false | false | false |
OmnesRes/pan_cancer | paper/cox_regression/KIRC/patient_info.py | 1 | 6241 | ## A script for extracting info about the patients used in the analysis
## Load necessary modules
from rpy2 import robjects as ro
import numpy as np
import os
ro.r('library(survival)')
##This call will only work if you are running python from the command line.
##If you are not running from the command line manually type in your paths.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
f=open(os.path.join(BASE_DIR,'tcga_data','KIRC','clinical','nationwidechildrens.org_clinical_follow_up_v1.0_kirc.txt'))
f.readline()
f.readline()
f.readline()
data=[i.split('\t') for i in f]
## A patient can be listed multiple times in the file. The most recent listing (furthest down in the file), contains the most recent
## follow up data. This code checks if the patient has already been loaded into the list, and if so, takes the more recent data.
## This required an empty value in the list initialization.
## Data is: [[Patient ID, time(days), Vital status],[Patient ID, time(days), Vital status],...]
clinical=[['','','']]
for i in data:
try:
if clinical[-1][0]==i[0]:
if i[8]=='Alive':
clinical[-1]=[i[0],int(i[9]),'Alive']
elif i[8]=='Dead':
clinical[-1]=[i[0],int(i[10]),'Dead']
else:
pass
else:
if i[8]=='Alive':
clinical.append([i[0],int(i[9]),'Alive'])
elif i[8]=='Dead':
clinical.append([i[0],int(i[10]),'Dead'])
else:
pass
except:
pass
## Removing the empty value.
clinical=clinical[1:]
## Grade, sex, and age information were taken from the "clinical_patient" file. A dictionary was created for sex and grade.
more_clinical={}
grade_dict={}
grade_dict['G1']=1
grade_dict['G2']=2
grade_dict['G3']=3
grade_dict['G4']=4
sex_dict={}
sex_dict['MALE']=0
sex_dict['FEMALE']=1
## The "clinical_patient" file can also contain patients not listed in the follow_up files.
## In these cases the clinical data for these patients gets appended to a new clinical list.
f=open(os.path.join(BASE_DIR,'tcga_data','KIRC','clinical','nationwidechildrens.org_clinical_patient_kirc.txt'))
f.readline()
f.readline()
f.readline()
clinical4=[]
data=[i.split('\t') for i in f]
for i in data:
try:
more_clinical[i[0]]=[grade_dict[i[4]],sex_dict[i[8]],int(i[-16])]
if i[24]=='Alive':
clinical4.append([i[0],int(i[25]),'Alive'])
elif i[24]=='Dead':
clinical4.append([i[0],int(i[26]),'Dead'])
else:
pass
except:
pass
new_clinical=[]
##It is possible that the clinical data in the clinical_patient file is more up to date than the follow_up files
##All the clinical data is merged checking which data is the most up to date
for i in clinical4:
if i[0] not in [j[0] for j in clinical]:
new_clinical.append(i)
else:
if i[1]<=clinical[[j[0] for j in clinical].index(i[0])][1]:
new_clinical.append(clinical[[j[0] for j in clinical].index(i[0])])
else:
new_clinical.append(i)
##also do the reverse since clinical can contain patients not included in clinical4
for i in clinical:
if i[0] not in [j[0] for j in new_clinical]:
new_clinical.append(i)
## only patients who had a follow up time greater than 0 days are included in the analysis
clinical=[i for i in new_clinical if i[1]>0]
final_clinical=[]
## A new list containing both follow up times and grade, sex, and age is constructed.
## Only patients with grade, sex, and age information are included.
## Data is [[Patient ID, time (days), vital status, grade, sex, age at diagnosis],...]
for i in clinical:
if i[0] in more_clinical:
final_clinical.append(i+more_clinical[i[0]])
## Need to map the mRNA files to the correct patients
## The necessary information is included in the FILE_SAMPLE_MAP.txt file
f=open(os.path.join(BASE_DIR,'tcga_data','KIRC','FILE_SAMPLE_MAP.txt'))
f.readline()
data=[i.strip().split() for i in f if i!='\n']
## 01 indicates a primary tumor, and only primary tumors are included in this analysis
TCGA_to_mrna={}
for i in data:
## The normalized data files are used
if 'genes.normalized_results' in i[0]:
if i[1].split('-')[3][:-1]=='01':
x=''.join([k+j for k,j in zip(['','-','-'],i[1].split('-')[:3])])
TCGA_to_mrna[x]=TCGA_to_mrna.get(x,[])+[i[0]]
clinical_and_files=[]
## We only care about patients that contained complete clinical information
for i in final_clinical:
if TCGA_to_mrna.has_key(i[0]):
## The mRNA files are added to the clinical list
## Data structure: [[Patient ID, time (days), vital status, grade, sex, age at diagnosis,[mRNA files]],...]
clinical_and_files.append(i+[TCGA_to_mrna[i[0]]])
else:
pass
##print average age at diagnosis
age=np.mean([i[5] for i in clinical_and_files])
##print number of males
males=len([i for i in clinical_and_files if i[4]==0])
##print number of females
females=len([i for i in clinical_and_files if i[4]==1])
##to get the median survival we need to call survfit from r
##prepare variables for R
ro.globalenv['times']=ro.IntVector([i[1] for i in clinical_and_files])
##need to create a dummy variable group
ro.globalenv['group']=ro.IntVector([0 for i in clinical_and_files])
##need a vector for deaths
death_dic={}
death_dic['Alive']=0
death_dic['Dead']=1
ro.globalenv['died']=ro.IntVector([death_dic[i[2]] for i in clinical_and_files])
res=ro.r('survfit(Surv(times,died) ~ as.factor(group))')
#the number of events(deaths) is the fourth column of the output
deaths=str(res).split('\n')[-2].strip().split()[3]
#the median survival time is the fifth column of the output
median=str(res).split('\n')[-2].strip().split()[4]
##write data to a file
f=open('patient_info.txt','w')
f.write('Average Age')
f.write('\t')
f.write('Males')
f.write('\t')
f.write('Females')
f.write('\t')
f.write('Deaths')
f.write('\t')
f.write('Median Survival')
f.write('\n')
f.write(str(age))
f.write('\t')
f.write(str(males))
f.write('\t')
f.write(str(females))
f.write('\t')
f.write(deaths)
f.write('\t')
f.write(median)
| mit | 3,009,862,005,212,880,400 | 28.300469 | 132 | 0.645089 | false | 2.936941 | false | false | false |
timstaley/voeventdb | voeventdb/server/restapi/v1/filters.py | 1 | 9800 | from __future__ import absolute_import, unicode_literals
from voeventdb.server.database.models import Voevent, Cite, Coord
from voeventdb.server.database.query import coord_cone_search_clause
import voeventdb.server.restapi.v1.apierror as apierror
from voeventdb.server.restapi.v1.filter_base import (
add_to_filter_registry, QueryFilter)
import iso8601
from sqlalchemy import (or_, and_, exists,
)
from sqlalchemy.orm import aliased
import six
if six.PY3:
from urllib.parse import quote_plus
else:
from urllib import quote_plus
from flask import json
@add_to_filter_registry
class AuthoredSince(QueryFilter):
"""
Return only VOEvents with a ``Who.Date`` entry dated after the given time.
(Time-range is inclusive, i.e. ``>=``)
Date-time strings passed should be in a format parseable by the
`iso8601.parse_date() <https://bitbucket.org/micktwomey/pyiso8601/#rst-header-parsed-formats>`_
function (see :py:attr:`example_values`).
"""
querystring_key = 'authored_since'
example_values = ['2015-10-09T21:34:19',
'2015-10-09',
'2015-10',
]
def filter(self, filter_value):
bound_dt = iso8601.parse_date(filter_value)
return Voevent.author_datetime >= bound_dt
@add_to_filter_registry
class AuthoredUntil(QueryFilter):
"""
Return only VOEvents with a ``Who.Date`` entry dated before the given time.
(Time-range is inclusive, i.e. ``<=``)
Date-time strings passed should be in a format parseable by the
`iso8601.parse_date() <https://bitbucket.org/micktwomey/pyiso8601/#rst-header-parsed-formats>`_
function (see :py:attr:`example_values`).
"""
querystring_key = 'authored_until'
example_values = ['2015-10-09T21:34:19',
'2015-10-09',
'2015-10',
]
def filter(self, filter_value):
bound_dt = iso8601.parse_date(filter_value)
return Voevent.author_datetime <= bound_dt
@add_to_filter_registry
class CitedByAny(QueryFilter):
"""
Return only VOEvents which are cited by another VOEvent in the database.
Applied via query-strings ``cited=true`` or ``cited=false``
"""
querystring_key = 'cited'
example_values = ['true',
'false'
]
def filter(self, filter_value):
cite2 = aliased(Cite)
filter_q = exists().where(Voevent.ivorn == cite2.ref_ivorn)
if filter_value.lower() == 'true':
return filter_q
elif filter_value.lower() == 'false':
return ~filter_q
else:
raise apierror.InvalidQueryString(self.querystring_key,
filter_value)
@add_to_filter_registry
class ConeSearch(QueryFilter):
"""
Return only VOEvents with co-ords in the given cone.
Cone specified as a 3-element list in JSON format::
[ra,dec,radius]
(values in decimal degrees).
"""
querystring_key = 'cone'
example_values = [
'[10,20,5]',
'[359.9,-30,5]'
]
simplejoin_tables = [Coord, ]
def filter(self, filter_value):
try:
ra, dec, radius = json.loads(filter_value)
except:
raise apierror.InvalidQueryString(self.querystring_key,
filter_value)
if dec < -90.0 or dec > 90.0:
raise apierror.InvalidQueryString(self.querystring_key,
filter_value,
reason="invalid declination value")
return coord_cone_search_clause(ra, dec, radius)
@add_to_filter_registry
class CoordsAny(QueryFilter):
"""
Return only VOEvents which have / do not have associated co-ord positions.
Applied via query-strings ``coord=true`` or ``coord=false``
"""
querystring_key = 'coord'
example_values = ['true',
'false'
]
def filter(self, filter_value):
filter_q = Voevent.coords.any()
if filter_value.lower() == 'true':
return filter_q
elif filter_value.lower() == 'false':
return ~filter_q
else:
raise apierror.InvalidQueryString(self.querystring_key,
filter_value)
@add_to_filter_registry
class DecGreaterThan(QueryFilter):
"""
Return VOEvents with position with Dec greater than given value.
Dec should be specified in decimal degrees.
"""
querystring_key = 'dec_gt'
example_values = ['0',
'-45.123'
]
simplejoin_tables = [Coord, ]
def filter(self, filter_value):
try:
min_dec = float(filter_value)
if min_dec < -90.0 or min_dec > 90.0:
raise ValueError
except:
raise apierror.InvalidQueryString(self.querystring_key,
filter_value,
reason="invalid declination value")
return Coord.dec > min_dec
@add_to_filter_registry
class DecLessThan(QueryFilter):
"""
Return VOEvents with position with Dec less than given value.
Dec should be specified in decimal degrees.
"""
querystring_key = 'dec_lt'
example_values = ['0',
'-45.123'
]
simplejoin_tables = [Coord, ]
def filter(self, filter_value):
try:
max_dec = float(filter_value)
if max_dec < -90.0 or max_dec > 90.0:
raise ValueError
except:
raise apierror.InvalidQueryString(self.querystring_key,
filter_value,
reason="invalid declination value")
return Coord.dec < max_dec
@add_to_filter_registry
class IvornContains(QueryFilter):
"""
Return only VOEvents which have the given substring in their IVORN.
"""
querystring_key = 'ivorn_contains'
example_values = ['BAT_GRB_Pos',
'XRT']
def filter(self, filter_value):
return Voevent.ivorn.like('%{}%'.format(filter_value))
def combinator(self, filters):
"""AND"""
return and_(filters)
@add_to_filter_registry
class IvornPrefix(QueryFilter):
"""
Return only VOEvents where the IVORN begins with the given value.
Note that the value passed should be URL-encoded if it contains
the ``#`` character e.g.::
quote_plus('ivo://nvo.caltech/voeventnet/catot#1404')
"""
querystring_key = 'ivorn_prefix'
example_values = [
'ivo://nasa.gsfc.gcn',
quote_plus('ivo://nvo.caltech/voeventnet/catot#1404')
]
def filter(self, filter_value):
return Voevent.ivorn.like('{}%'.format(filter_value))
def combinator(self, filters):
"""OR"""
return or_(filters)
@add_to_filter_registry
class RefAny(QueryFilter):
"""
Return only VOEvents which make / don't make reference to any other VOEvents.
Applied via query-strings ``ref_any=true`` or ``ref_any=false``.
NB 'true'/'false' string-values are case-insensitive, so e.g.
'true', 'True', 'TRUE', 'tRUe' are all valid.
"""
querystring_key = 'ref_any'
example_values = ['true',
'True',
'false'
]
def filter(self, filter_value):
filter_q = Voevent.cites.any()
if filter_value.lower() == 'true':
return filter_q
elif filter_value.lower() == 'false':
return ~filter_q
else:
raise apierror.InvalidQueryString(self.querystring_key,
filter_value)
@add_to_filter_registry
class RefContains(QueryFilter):
"""
Return VOEvents which reference an IVORN containing the given substring.
"""
querystring_key = 'ref_contains'
example_values = [
quote_plus('BAT_GRB_Pos'),
quote_plus('GBM_Alert'),
]
def filter(self, filter_value):
return Voevent.cites.any(
Cite.ref_ivorn.like('%{}%'.format(filter_value))
)
def combinator(self, filters):
"""OR"""
return or_(filters)
@add_to_filter_registry
class RefExact(QueryFilter):
"""
Return only VOEvents which contain a ref to the given (url-encoded) IVORN.
"""
querystring_key = 'ref_exact'
example_values = [
quote_plus('ivo://nasa.gsfc.gcn/SWIFT#BAT_GRB_Pos_649113-680'),
quote_plus(
'ivo://nasa.gsfc.gcn/Fermi#GBM_Alert_2015-08-10T14:49:38.83_460910982_1-814'),
]
def filter(self, filter_value):
return Voevent.cites.any(Cite.ref_ivorn == filter_value)
def combinator(self, filters):
"""OR"""
return or_(filters)
@add_to_filter_registry
class RoleEquals(QueryFilter):
querystring_key = 'role'
example_values = [
'observation',
'utility',
'test'
]
def filter(self, filter_value):
if filter_value not in self.example_values:
raise apierror.InvalidQueryString(
self.querystring_key, filter_value)
return Voevent.role == filter_value
def combinator(self, filters):
"""OR"""
return or_(filters)
@add_to_filter_registry
class StreamEquals(QueryFilter):
querystring_key = 'stream'
example_values = [
'nasa.gsfc.gcn#SWIFT',
'nvo.caltech/voeventnet/catot'
]
def filter(self, filter_value):
return Voevent.stream == filter_value
def combinator(self, filters):
"""OR"""
return or_(filters)
| gpl-2.0 | -4,847,031,049,909,061,000 | 28.429429 | 99 | 0.579388 | false | 3.804348 | false | false | false |
spyoungtech/behave-webdriver | behave_webdriver/fixtures.py | 1 | 6881 | """
Provides fixtures to initialize the web driver.
"""
from behave import fixture, use_fixture
from behave_webdriver.utils import _from_string, _from_env
from behave_webdriver.driver import BehaveDriverMixin
from functools import partial
from behave_webdriver import transformers
import six
_env_webdriver_name = 'env'
class DriverNotSet:
pass
@fixture
def fixture_browser(context, *args, **kwargs):
"""
webdriver setup fixture for behave context; sets ``context.behave_driver``.
Will destroy the driver at the end of this fixture usage.
:param webdriver: the webdriver to use -- can be a string (e.g. ``"Chrome"``) or a webdriver class. If omitted, will attempt to use the BEHAVE_WEBDRIVER environment variable
:param default_driver: a fallback driver if webdriver keyword is not provided AND the BEHAVE_WEBDRIVER environment variable is not set. Defaults to 'Chrome.headless'
:param args: arguments that will be passed as is to the webdriver.
:param kwargs: keywords arguments that will be passed as is to the webdriver.
Basic usage:
>>> from behave import use_fixture
>>> from behave_webdriver.fixtures import fixture_browser
>>> def before_all(context):
... use_fixture(fixture_browser, context, webdriver='firefox')
    You may also provide a webdriver class. Just be sure it inherits from (or otherwise has the methods of) BehaveDriverMixin
>>> from behave import use_fixture
>>> from behave_webdriver.fixtures import fixture_browser
>>> from behave_webdriver.driver import BehaveDriverMixin
>>> from selenium.webdriver import Firefox
>>> class FirefoxDriver(BehaveDriverMixin, Firefox):
... pass
>>> def before_all(context):
... use_fixture(fixture_browser, context, webdriver=FirefoxDriver)
positional arguments and additional keyword arguments are passed to the webdriver init:
>>> from behave import use_fixture
>>> from behave_webdriver.fixtures import fixture_browser
>>> from behave_webdriver.driver import ChromeOptions
>>> def before_all(context):
... options = ChromeOptions()
... options.add_argument('--ignore-gpu-blacklist')
... use_fixture(fixture_browser, context, webdriver='chrome', options=options)
    If the ``webdriver`` keyword is omitted, will attempt to get the driver from BEHAVE_WEBDRIVER or will use headless chrome as a final fallback if the environment variable is not set and there is no ``default_driver`` specified
>>> from behave import use_fixture
>>> from behave_webdriver.fixtures import fixture_browser
>>> def before_all(context):
... # try to use driver from BEHAVE_WEBDRIVER environment variable; use firefox as a fallback when env not set
... use_fixture(fixture_browser, context, default_driver='firefox')
"""
webdriver = kwargs.pop('webdriver', None)
default_driver = kwargs.pop('default_driver', 'Chrome.headless')
if isinstance(webdriver, six.string_types):
webdriver = _from_string(webdriver)
if webdriver is None:
webdriver = _from_env(default_driver=default_driver)
old_driver_class = context.BehaveDriver if 'BehaveDriver' in context else DriverNotSet
old_driver = context.behave_driver if 'behave_driver' in context else DriverNotSet
context.behave_driver = webdriver(*args, **kwargs)
def cleanup_driver(ctx, old_driver, old_driver_class):
try:
ctx.behave_driver.quit()
finally:
if old_driver_class is DriverNotSet and 'BehaveDriver' in ctx:
del ctx.BehaveDriver
else:
ctx.BehaveDriver = old_driver_class
if old_driver is DriverNotSet and 'behave_driver' in ctx:
del ctx.behave_driver
else:
ctx.behave_driver = old_driver
cleanup = partial(cleanup_driver, context, old_driver, old_driver_class)
context.add_cleanup(cleanup)
def before_all_factory(*args, **kwargs):
"""
    Create and return a ``before_all`` function that uses the ``fixture_browser`` fixture with the corresponding arguments
:param args: positional arguments of ``fixture_browser``
:param kwargs: keywords arguments of ``fixture_browser``
>>> from behave_webdriver.fixtures import before_all_factory
>>> before_all = before_all_factory(webdriver='firefox')
"""
def before_all(context):
use_fixture(fixture_browser, context, *args, **kwargs)
return before_all
def before_feature_factory(*args, **kwargs):
"""
    Create and return a ``before_feature`` function that uses the ``fixture_browser`` fixture with the corresponding arguments
:param args: positional arguments of ``fixture_browser``
:param kwargs: keywords arguments of ``fixture_browser``
>>> from behave_webdriver.fixtures import before_feature_factory
>>> before_feature = before_feature_factory(webdriver='firefox')
"""
def before_feature(context, feature):
use_fixture(fixture_browser, context, *args, **kwargs)
return before_feature
def before_scenario_factory(*args, **kwargs):
"""
    Create and return a ``before_scenario`` function that uses the ``fixture_browser`` fixture with the corresponding arguments
:param args: positional arguments of ``fixture_browser``
:param kwargs: keywords arguments of ``fixture_browser``
>>> from behave_webdriver.fixtures import before_scenario_factory
>>> before_scenario = before_scenario_factory(webdriver='firefox')
"""
def before_scenario(context, scenario):
use_fixture(fixture_browser, context, *args, **kwargs)
return before_scenario
class TransformerNotSet:
pass
@fixture
def transformation_fixture(context, transformer_class, *args, **kwargs):
old_transformer = context.transformer_class if 'transformer_class' in context else TransformerNotSet
transformer_class = partial(transformer_class, *args, **kwargs)
context.transformer_class = transformer_class
def cleanup(context, old):
if old is TransformerNotSet:
del context.transformer_class
else:
context.transformer_class = old
cleanup_transformer = partial(cleanup, context, old_transformer)
context.add_cleanup(cleanup_transformer)
def use_fixture_tag(context, tag, *args, **kwargs):
if not tag.startswith('fixture'):
return
if tag.startswith('fixture.webdriver'):
browser_name = '.'.join(tag.split('.')[2:])
if browser_name == 'browser':
browser_name = 'Chrome.headless'
use_fixture(fixture_browser, context, *args, **kwargs)
elif tag.startswith('fixture.transformer'):
transformer_name = tag.split('.')[-1]
transformer_class = getattr(transformers, transformer_name)
use_fixture(transformation_fixture, context, transformer_class, **kwargs)
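# Illustrative sketch (added for clarity; not part of the original module):
# wiring ``use_fixture_tag`` into a behave ``environment.py`` so that tags such
# as ``@fixture.webdriver.browser`` or ``@fixture.transformer.<Name>`` set up
# the matching fixture. ``before_tag`` is behave's standard hook; how a given
# project forwards extra arguments is an assumption.
#
#   # environment.py
#   from behave_webdriver.fixtures import use_fixture_tag
#
#   def before_tag(context, tag):
#       use_fixture_tag(context, tag)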
| mit | -6,543,961,173,381,331,000 | 39.476471 | 216 | 0.697428 | false | 4.34681 | false | false | false |
wenrr89/airtest | airtest/__init__.py | 1 | 3805 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#coding: utf-8
#
#__all__=['devsuit', 'android', 'image', 'base', 'patch', 'ios', 'device']
__version__ = '0.7.2'
ANDROID = 'android'
IOS = 'ios'
WINDOWS='windows'
EV_DOWN = 'down'
EV_UP = 'up'
EV_DOWN_AND_UP = 'down_and_up'
import os
import json
import subprocess
import signal, sys
# just import
import monitor
def _sig_handler(signum, frame):
print >>sys.stderr, 'Signal INT catched !!!'
sys.exit(1)
signal.signal(signal.SIGINT, _sig_handler)
from airtest import devsuit
defaultConfigFile = 'air.json'
defaultDevice = 'android'
def _safe_load_config(cfg_file):
if os.path.exists(cfg_file):
return json.load(open(cfg_file))
return {}
#
## ==========================================================
#
def _android_start(serialno, params):
package = params.get('package')
activity = params.get('activity')
subprocess.call(['adb', '-s', serialno, 'shell', 'am', 'start', '-n', '/'.join([package, activity])])
def _android_stop(serialno, params):
package = params.get('package')
subprocess.call(['adb', '-s', serialno, 'shell', 'am', 'force-stop', package])
def _windows_start(basename, params={}):
dir_ = params.get('dir') or '.'
os.system('cd /d %s && start %s' %(dir_, basename))
def _windows_stop(basename, params={}):
basename = basename.lower()
if not basename.endswith('.exe'):
basename += '.exe'
os.system('taskkill /t /f /im %s' %(basename))
def _run_control(devno, device=None, action='start'):
device = device or defaultDevice
cfg = _safe_load_config(defaultConfigFile)
func = '_%s_%s'%(device, action)
if func not in globals():
raise RuntimeError('device(%s) %s method not exists' % (device, action))
return globals()[func](devno, cfg.get(device, {}))
def start(devno, device=None):
_run_control(devno, device, 'start')
def stop(devno, device=None):
_run_control(devno, device, 'stop')
#
## ----------------------------------------------------------
#
def connect(phoneno=None, appname=None, device=None, monitor=True, logfile='log/airtest.log'):
'''
Connect device
@param phoneno: If phoneno is None, then get device serialno from `adb devices`
@param device: can be one of <android|windows|ios>
    @param monitor: whether to enable CPU monitor
'''
if not phoneno:
devs = getDevices()
if not devs:
sys.exit('adb: No devices found')
if len(devs) != 1:
sys.exit('adb: Too many devices, need to specify phone serialno')
phoneno = devs[0][0]
device = device or defaultDevice
if device == ANDROID:
from airtest.device import android
subprocess.call(['adb', 'start-server'])
if not phoneno:
phoneno = [d for d, t in getDevices() if t == 'device'][0]
devClass = android.Device
elif device == IOS:
from airtest.device import ios
devClass = ios.Device
elif device == WINDOWS:
from airtest.device import windows
devClass = windows.Device
elif device == 'dummy': # this class is only for test
from airtest.device import dummy
devClass = dummy.Device
else:
raise RuntimeError('device type not recognize')
return devsuit.DeviceSuit(device, devClass, phoneno,
appname=appname, logfile=logfile, monitor=monitor)
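# Illustrative usage sketch (added for clarity; not part of the original
# module). The serial number and package name are hypothetical:
#
#   import airtest
#   device = airtest.connect('0123456789ABCDEF', appname='com.example.app')
#   # `device` is the DeviceSuit wrapper returned above; use it to drive the
#   # app under test, with CPU monitoring enabled by default (monitor=True).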
def getDevices(device='android'):
'''
@return devices list
'''
subprocess.call(['adb', 'start-server'])
output = subprocess.check_output(['adb', 'devices'])
result = []
for line in str(output).splitlines()[1:]:
ss = line.strip().split()
if len(ss) == 2:
(phoneno, state) = ss
result.append((phoneno, state))
return result
| bsd-3-clause | 2,023,906,217,956,071,700 | 28.045802 | 105 | 0.600263 | false | 3.474886 | true | false | false |
adamreeve/Mendeley-FileSync | mendeleyfilesync.py | 1 | 12774 | #!/usr/bin/env python
"""
Synchronise the location of files in the Mendeley database using a
relative base path by storing the locations in a text database that
can be synchronised.
Currently ignores files outside of the base path.
It will also only add new files; it won't clean up deleted files.
Designed to be used with something like Unison or DropBox to
synchronise the PDF files.
"""
from argparse import ArgumentParser
import os
import sys
import urllib
from itertools import ifilter
try:
import sqlite3
except:
from pysqlite2 import dbapi2 as sqlite3
def main():
# Parse command line arguments
parser = ArgumentParser(
prog='mendeleyfilesync.py',
description="Synchronise the location of files in the Mendeley "
"database using a relative base path.")
parser.add_argument('mendeley_database',
help='Path to the Mendeley sqlite database, eg. '
'"~/.local/share/data/Mendeley Ltd./Mendeley Desktop/'
'you@somewhere.com@www.mendeley.com.sqlite"')
parser.add_argument('text_database',
help="Path to the text datbase used to store file locations, "
"eg. ~/.mendeley_files.dat")
parser.add_argument('file_path',
help="Directory used to store PDF files")
parser.add_argument('-d', '--dry-run',
action='store_const', dest='dry_run',
const=True, default=False,
help="Display changes that would be made but don't actually "
"modify the database")
parser.add_argument('-f', '--force-update',
action='store_const', dest='force_update',
const=True, default=False,
help="Replace file path in Mendeley with path from the text "
"database when there is a conflict")
args = parser.parse_args()
# Check path to Mendeley database file
if not os.path.isfile(args.mendeley_database):
sys.stderr.write('File "%s" does not exist\n' % args.mendeley_database)
exit(1)
# Check path to directory where PDFs are stored
if not os.path.isdir(args.file_path):
sys.stderr.write('"%s" is not a directory\n' % args.file_path)
exit(1)
with MendeleyDB(
args.mendeley_database,
args.file_path,
args.dry_run) as mendeley_db:
run_synchronisation(
mendeley_db, args.text_database,
args.dry_run, args.force_update)
class MendeleyDB(object):
"""
An interface to the Mendeley database
"""
def __init__(self, path, file_path, dry_run=False):
self.path = path
self.base_url = directory_to_url(file_path)
self.dry_run = dry_run
def __enter__(self):
"""
Open the database connection
"""
self.connection = sqlite3.connect(self.path)
self.cursor = self.connection.cursor()
return self
def __exit__(self, exc_type, exc_value, traceback):
"""
Close the database connection
"""
self.connection.commit()
self.cursor.close()
def execute_unsafe(self, statement, values=()):
"""
Execute an SQL statement that may alter data
If dry_run is set, print the statement and don't execute anything.
This is useful for debugging or just for peace of mind.
"""
if self.dry_run:
s = statement
for v in values:
s = s.replace('?', '"%s"' % str(v), 1)
print("Executing: %s" % s)
else:
return self.cursor.execute(statement, values)
def get_document(self, id):
"""
Get a document using the document id
"""
self.cursor.execute(
"SELECT uuid, citationKey FROM Documents WHERE id = ?", (id, ))
result = self.cursor.fetchone()
if result:
uuid, citation_key = result
if citation_key is None:
citation_key = ""
else:
raise KeyError("Could not find document with id %s" % id)
return (uuid, citation_key)
def document_id(self, uuid):
"""
Get the db primary key for a document from the uuid
"""
self.cursor.execute(
"SELECT id FROM Documents WHERE uuid = ?", (uuid, ))
result = self.cursor.fetchone()
if result:
return result[0]
else:
raise KeyError("Couldn't find document with uuid %s" % uuid)
def get_file_name(self, hash):
"""
Find the file name from the file hash
"""
self.cursor.execute(
"SELECT localUrl FROM Files WHERE hash = ?", (hash, ))
result = self.cursor.fetchone()
if result:
full_path = result[0]
return full_path.replace(self.base_url + u'/', '')
else:
raise KeyError("Couldn't find file with hash %s" % hash)
def document_files(self):
"""
Return all files associated with documents
"""
self.cursor.execute("SELECT documentId, hash FROM DocumentFiles")
for document_id, file_hash in self.cursor.fetchall():
doc_uuid, doc_citation_key = self.get_document(document_id)
file_name = self.get_file_name(file_hash)
# Some files are not stored locally, so the file name is not set
if file_name:
yield DocumentFile(
doc_uuid, doc_citation_key, file_hash, file_name)
def add_file(self, document_file):
"""
Add the file to the database and attach it to the document
"""
# Check document exists in Mendeley database
try:
document_id = self.document_id(document_file.uuid)
except KeyError:
sys.stderr.write(
"Warning: No Mendeley document for file %s.\n"
"Perhaps you need to synchronise your Mendeley "
"desktop client first.\n" % document_file.name)
return
# Check file doesn't already exist
self.cursor.execute(
"SELECT hash FROM Files WHERE hash = ?",
(document_file.hash, ))
result = self.cursor.fetchone()
if result:
sys.stderr.write("Warning: File hash already exists "
"for file %s.\n" % document_file.name)
return
# Insert file
file_url = u'/'.join((self.base_url, document_file.name))
self.execute_unsafe(
"INSERT INTO Files (hash, localUrl) VALUES (?, ?)",
(document_file.hash, file_url))
# Link file to document
self.execute_unsafe(
"INSERT INTO DocumentFiles "
"(documentId, hash, remoteUrl, unlinked, downloadRestricted) "
"VALUES (?, ?, '', 'false', 'false')",
(document_id, document_file.hash))
def update_file(self, document_file):
"""
Update the file path for an existing file
"""
file_url = u'/'.join((self.base_url, document_file.name))
self.execute_unsafe(
"UPDATE Files SET localUrl=? WHERE hash=?",
(file_url, document_file.hash))
class DocumentFile(object):
"""
A file associated with a reference document
for storing in the text database
"""
# Separator used in the text database
sep = u':::'
def __init__(self, uuid, key, hash, name):
# uuid and key represent document
# there may be multiple files with the same document
self.uuid = uuid
self.key = key
# hash and name represent file
self.hash = hash
self.name = name
@classmethod
def from_text(cls, line):
"""
Initialise a new entry from the text representation
"""
try:
(uuid, key, hash, name) = line.strip().split(cls.sep)
except ValueError:
raise ValueError("Invalid database line: %s" % line)
return cls(uuid, key, hash, name)
def text_entry(self):
"""
Return a string representing the entry in the
format used by text database
"""
return self.sep.join((self.uuid, self.key, self.hash, self.name))
def sort_key(self):
"""
Key used to sort document files in the text database
"""
if self.key:
return self.key.lower()
else:
return self.name.lower()
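# Illustrative example (added for clarity; not part of the original script):
# each line of the text database produced by text_entry() has the form
#   <document uuid>:::<citation key>:::<file hash>:::<relative file name>
# for instance (hypothetical values):
#   1c2d...-uuid:::Smith2012:::ab12cd34ef:::Smith_2012_Some_Paper.pdf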
def directory_to_url(path):
"""
Convert a directory path to a URL format
"""
path = os.path.abspath(path)
# Remove leading slash so Linux and Windows paths both
# don't have a slash, which can then be added
if path.startswith('/'):
path = path[1:]
# Make sure separators are forward slashes
path = path.replace(os.sep, '/')
if path.endswith('/'):
path = path[:-1]
# Url encode special characters
url = u'file:///' + urllib.quote(path, safe='/:').decode('ascii')
return url
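# Illustrative example (added for clarity; not part of the original script):
#   directory_to_url('/home/user/My Papers')  ->  u'file:///home/user/My%20Papers'
# and on Windows, 'C:\\Papers' becomes u'file:///C:/Papers'.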
def relative_file(file):
"""
Check that a file is within the PDF storage directory
"""
# If it is, the base path will have been removed
return file.name.find(u'file://') < 0
def get_new_files(afiles, bfiles):
"""
Compare a list of files and return a list of the new ones
"""
afile_hashes = set(afile.hash for afile in afiles)
# Check that the file doesn't exist in the other set and make sure it
# also isn't outside the base path, in which case it's ignored
new_files = (file for file in bfiles if
file.hash not in afile_hashes)
return ifilter(relative_file, new_files)
def get_different_files(afiles, bfiles):
"""
Check if any file names have changed
"""
a_file_names = dict((file.hash, file.name) for file in afiles)
# Find files with same hash but named differently
different_files = (
(file, a_file_names[file.hash]) for file in bfiles if
file.hash in a_file_names and
file.name != a_file_names[file.hash])
return different_files
def run_synchronisation(mendeley_db, text_database_path,
dry_run=False, force_update=False):
"""
Synchronise updates between the Mendeley database and
text file database
"""
mendeley_entries = set(mendeley_db.document_files())
if os.path.isfile(text_database_path):
with open(text_database_path, 'r') as text_db_file:
text_db_entries = set(
DocumentFile.from_text(line.decode('utf-8'))
for line in text_db_file)
else:
# Assume this is the first run and the text database
        # has not yet been created
print("Creating new text database file.")
text_db_entries = set()
# Add new files from Mendeley to the text database
new_files = set(get_new_files(text_db_entries, mendeley_entries))
if new_files:
print("New files from Mendeley:")
for f in new_files:
print(f.name)
text_db_entries.add(f)
else:
print("No new files from Mendeley.")
# Update Mendeley database with new files from the text database
new_files = set(get_new_files(mendeley_entries, text_db_entries))
if new_files:
print("New files from the text database:")
for f in new_files:
print(f.name)
mendeley_db.add_file(f)
else:
print("No new files from the text database.")
# Write out any conflicts where files exist in both but have
# different locations, so that conflicts can be manually resolved,
# or override the file path in Mendeley if force_update is set
different_files = get_different_files(mendeley_entries, text_db_entries)
for different_file, conflicting_name in different_files:
if force_update:
sys.stderr.write(
"Forcing update: %s to %s\n" %
(conflicting_name, different_file.name))
mendeley_db.update_file(different_file)
else:
sys.stderr.write(
"Conflict: %s, %s\n" %
(conflicting_name, different_file.name))
# Write updated text database file
text_db_lines = ((file.text_entry() + u'\n').encode('utf-8')
for file in sorted(text_db_entries, key=lambda f: f.sort_key()))
if not dry_run:
with open(text_database_path, 'w') as text_db_file:
for line in text_db_lines:
text_db_file.write(line)
else:
print("Text file:")
for line in text_db_lines:
sys.stdout.write(line)
if __name__ == '__main__':
main()
| gpl-3.0 | 2,736,401,674,511,344,000 | 30.855362 | 79 | 0.584155 | false | 4.034744 | false | false | false |
jwlawson/tensorflow | tensorflow/contrib/py2tf/pyct/templates_test.py | 3 | 2534 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for templates module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.contrib.py2tf.pyct import compiler
from tensorflow.contrib.py2tf.pyct import templates
from tensorflow.python.platform import test
class TemplatesTest(test.TestCase):
def test_replace_variable(self):
def template(a): # pylint:disable=unused-argument
def test_fn(a): # pylint:disable=unused-variable
a += 1
a = 2 * a + 1
return b # pylint:disable=undefined-variable
node = templates.replace(
template, a=gast.Name('b', gast.Load(), None))[0]
result = compiler.ast_to_object(node)
self.assertEquals(7, result.test_fn(2))
def test_replace_function_name(self):
def template(fname): # pylint:disable=unused-argument
def fname(a): # pylint:disable=function-redefined
a += 1
a = 2 * a + 1
return a
node = templates.replace(
template, fname=gast.Name('test_fn', gast.Load(), None))[0]
result = compiler.ast_to_object(node)
self.assertEquals(7, result.test_fn(2))
def test_code_block(self):
def template(block): # pylint:disable=unused-argument
def test_fn(a): # pylint:disable=unused-variable
block # pylint:disable=pointless-statement
return a
node = templates.replace(
template,
block=[
gast.Assign(
[
gast.Name('a', gast.Store(), None)
],
gast.BinOp(
gast.Name('a', gast.Load(), None),
gast.Add(),
gast.Num(1))),
] * 2)[0]
result = compiler.ast_to_object(node)
self.assertEquals(3, result.test_fn(1))
if __name__ == '__main__':
test.main()
| apache-2.0 | 1,456,622,272,616,422,400 | 31.909091 | 80 | 0.619574 | false | 3.898462 | true | false | false |
APSL/Impostor | setup.py | 1 | 3564 | """
Shamelessly copied from django's setup.py and edited to fit
"""
from distutils.core import setup
from distutils.command.install_data import install_data
from distutils.command.install import INSTALL_SCHEMES
import os
import sys
class OsxInstallData(install_data):
# On MacOS, the platform-specific lib dir is /System/Library/Framework/Python/.../
# which is wrong. Python 2.5 supplied with MacOS 10.5 has an Apple-specific fix
# for this in distutils.command.install_data#306. It fixes install_lib but not
# install_data, which is why we roll our own install_data class.
def finalize_options(self):
# By the time finalize_options is called, install.install_lib is set to the
# fixed directory, so we set the installdir to install_lib. The
# install_data class uses ('install_data', 'install_dir') instead.
self.set_undefined_options('install', ('install_lib', 'install_dir'))
install_data.finalize_options(self)
if sys.platform == "darwin":
cmdclasses = {'install_data': OsxInstallData}
else:
cmdclasses = {'install_data': install_data}
def fullsplit(path, result=None):
"""
Split a pathname into components (the opposite of os.path.join) in a
platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
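# For illustration (not in the original file): on a POSIX-style path,
# fullsplit('impostor/templates/admin') returns ['impostor', 'templates', 'admin'].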
# Tell distutils to put the data_files in platform-specific installation
# locations. See here for an explanation:
# http://groups.google.com/group/comp.lang.python/browse_thread/thread/35ec7b2fed36eaec/2105ee4d9e8042cb
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
impostor_dir = 'impostor'
for dirpath, dirnames, filenames in os.walk(impostor_dir):
# Ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith('.'):
del dirnames[i]
if '__init__.py' in filenames:
packages.append('.'.join(fullsplit(dirpath)))
elif filenames:
data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames]])
# Small hack for working with bdist_wininst.
# See http://mail.python.org/pipermail/distutils-sig/2004-August/004134.html
if len(sys.argv) > 1 and sys.argv[1] == 'bdist_wininst':
for file_info in data_files:
file_info[0] = '\\PURELIB\\%s' % file_info[0]
setup(
name="Impostor",
version="1.0.2",
url='https://github.com/samastur/Impostor/',
author='Marko Samastur',
author_email='markos@gaivo.net',
description='Staff can login as a different user.',
long_description='Django app allowing staff with their credentials to login as other users.',
license='MIT License',
platforms=['any'],
packages=packages,
cmdclass=cmdclasses,
data_files=data_files,
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
)
| mit | 5,156,180,230,204,205,000 | 35 | 104 | 0.659371 | false | 3.735849 | false | false | false |
pysmt/pysmt | pysmt/cmd/check_version.py | 1 | 2366 | # Copyright 2014 Andrea Micheli and Marco Gario
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import re
def check_version(module):
    version = None  # stays None if the module is missing or its version string is unrecognized
    try:
if module == "z3":
import z3
(major, minor, ver, _) = z3.get_version()
version = "%d.%d.%d" % (major, minor, ver)
elif module == "msat":
import mathsat
version_str = mathsat.msat_get_version()
m = re.match(r"^MathSAT5 version (\d+\.\d+\.\d+) .*$", version_str)
if m is not None:
version = m.group(1)
elif module == "cudd":
import repycudd
doc = repycudd.DOCSTRING
m = re.match(r"^PyCUDD (\d+\.\d+\.\d+).*", doc)
if m is not None:
version = m.group(1)
elif module == "btor":
import pyboolector
version = "OK" # Just checking if import succeeds
elif module == "cvc4":
import CVC4
version = CVC4.Configuration_getVersionString()
elif module == "picosat":
import picosat
version = picosat.picosat_version()
elif module == "yices":
import yicespy
v = yicespy.__dict__['__YICES_VERSION']
m = yicespy.__dict__['__YICES_VERSION_MAJOR']
p = yicespy.__dict__['__YICES_VERSION_PATCHLEVEL']
version = "%d.%d.%d" % (v, m, p)
else:
print("Invalid argument '%s'" % module)
exit(-2)
except ImportError:
version = None
return version
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage: python %s <solver_name>" % sys.argv[0])
exit(-1)
module = sys.argv[1]
version = check_version(module)
if version is None:
print("NOT INSTALLED")
else:
print(version)
| apache-2.0 | -106,857,827,986,260,180 | 28.949367 | 79 | 0.559172 | false | 3.828479 | false | false | false |
manannayak/asynq | asynq/scheduler.py | 1 | 10768 | # Copyright 2016 Quora, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sys import stderr, stdout
import time
import threading
from qcore import utime
import qcore.events as core_events
from . import debug
from . import futures
from . import batching
from . import _debug
from .async_task import AsyncTask
assert str(__name__).endswith('asynq.scheduler') \
or str(__name__).endswith('asynq.lib64.scheduler'), \
"Are you importing asynq from the wrong directory?"
_debug_options = _debug.options
_futures_none = futures._none
class AsyncTaskError(Exception):
pass
class TaskScheduler(object):
"""Schedules and runs AsyncTask objects flushing batches when needed."""
def __init__(self):
self._last_dump_time = time.time()
self.on_before_batch_flush = core_events.EventHook()
self.on_after_batch_flush = core_events.EventHook()
thread = threading.current_thread()
thread_name = thread.name if thread.name else str(thread.ident)
if '_state' in globals():
_state.last_id += 1
_id = _state.last_id
else:
_id = 0
self.name = '%s / %d' % (thread_name, _id)
self.reset()
def reset(self):
self._batches = set()
self._tasks = []
self.active_task = None
def wait_for(self, task):
"""
Executes a task and ensures it's complete when this method returns.
:param tasks: task to wait for
:return: ``None``
"""
while not task.is_computed():
self._execute(task)
if task.is_computed():
break
self._continue_with_batch()
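    # Illustrative sketch (not part of the original source): given an
    # uncomputed task produced elsewhere by this library, a caller can drive
    # it to completion with
    #
    #   scheduler = get_scheduler()   # module-level helper defined below
    #   scheduler.wait_for(task)      # returns once task.is_computed() is True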
def _execute(self, root_task):
"""Implements task execution loop.
The postcondition of this method is that all tasks in the dependency tree of root_task
that aren't blocked on batch items waiting to be flushed should be executed until they are
(or until they're computed). This is done by running a depth-first search on the dependency
tree.
:param root_task: root of the dependency tree
:return: ``None``
"""
init_num_tasks = len(self._tasks)
self._tasks.append(root_task)
# Run the execution loop until the root_task is complete (it's either blocked on batch
# items waiting to be flushed, or computed).
while len(self._tasks) > init_num_tasks:
if len(self._tasks) > _debug_options.MAX_TASK_STACK_SIZE:
self.reset()
debug.dump(self)
raise RuntimeError('Number of scheduled tasks exceeded maximum threshold.')
# _tasks is a stack, so take the last one.
task = self._tasks[-1]
if _debug_options.DUMP_SCHEDULER_STATE:
self.try_time_based_dump()
if task.is_computed():
self._tasks.pop()
elif isinstance(task, AsyncTask):
self._handle_async_task(task)
elif isinstance(task, batching.BatchItemBase):
# This can happen multiple times per batch item (if we run _execute and this batch
# item doesn't get flushed), but that's ok because self._batches is a set.
self._schedule_batch(task.batch)
self._tasks.pop()
else:
task._compute()
self._tasks.pop()
def _schedule_batch(self, batch):
if batch.is_flushed():
if _debug_options.DUMP_SCHEDULE_BATCH:
debug.write("@async: can't schedule flushed batch %s" % debug.str(batch))
return False
if _debug_options.DUMP_SCHEDULE_BATCH and batch not in self._batches:
debug.write('@async: scheduling batch %s' % debug.str(batch))
self._batches.add(batch)
return True
def _flush_batch(self, batch):
self.on_before_batch_flush(batch)
try:
batch.flush()
finally:
self.on_after_batch_flush(batch)
return 0
def _handle_async_task(self, task):
        # is_blocked indicates that one of the task's dependencies isn't computed yet,
# so we can't run _continue until they are.
if task.is_blocked():
# _dependencies_scheduled indicates if we've already added the task's
# dependencies to the task stack. If the task is blocked and we've already
# scheduled and run its dependencies, it's blocked on batch items waiting
# to be flushed so we're done with this task.
if task._dependencies_scheduled:
# Set _dependencies_scheduled to false so on future runs of _execute,
# we add the dependencies to the task stack (since some of the batch items
# in the subtree might have been flushed)
if _debug_options.DUMP_CONTINUE_TASK:
debug.write('@async: skipping %s' % debug.str(task))
task._dependencies_scheduled = False
task._pause_contexts()
self._tasks.pop()
# If the task is blocked and we haven't scheduled its dependencies, we
# should do so now.
else:
task._dependencies_scheduled = True
task._resume_contexts()
for dependency in task._dependencies:
if not dependency.is_computed():
if _debug_options.DUMP_SCHEDULE_TASK:
debug.write('@async: scheduling task %s' % debug.str(dependency))
if _debug_options.DUMP_DEPENDENCIES:
debug.write('@async: +dependency: %s needs %s' % (debug.str(task), debug.str(dependency)))
self._tasks.append(dependency)
else:
self._continue_with_task(task)
def _continue_with_task(self, task):
task._resume_contexts()
self.active_task = task
if _debug_options.DUMP_CONTINUE_TASK:
debug.write('@async: -> continuing %s' % debug.str(task))
if _debug_options.COLLECT_PERF_STATS:
start = utime()
task._continue()
task._total_time += utime() - start
if task.is_computed() and isinstance(task, AsyncTask):
task.dump_perf_stats()
else:
task._continue()
if _debug_options.DUMP_CONTINUE_TASK:
debug.write('@async: <- continued %s' % debug.str(task))
self.active_task = None
# We get a new set of dependencies when we run _continue, so these haven't
# been scheduled.
task._dependencies_scheduled = False
def _continue_with_batch(self):
"""
        Flushes one of the batches (the highest-priority one, as determined
        by ``_select_batch_to_flush``).
:return: the batch that was flushed, if there was a flush;
otherwise, ``None``.
"""
batch = self._select_batch_to_flush()
        if batch is None:
            if _debug_options.DUMP_FLUSH_BATCH:
                debug.write('@async: no batch to flush')
            return None
self._batches.remove(batch)
self._flush_batch(batch)
return batch
def _select_batch_to_flush(self):
"""Returns the batch having highest priority,
or ``None``, if there are no batches.
This method uses ``BatchBase.get_priority()`` to
determine the priority.
        Side effect: this method removes flushed batches.
:return: selected batch or None.
"""
best_batch = None
best_priority = None
batches_to_remove = None
for batch in self._batches:
if not batch.items or batch.is_flushed():
if batches_to_remove is None:
batches_to_remove = [batch]
else:
batches_to_remove.append(batch)
continue
priority = batch.get_priority()
if best_batch is None or best_priority < priority:
best_batch = batch
best_priority = priority
if batches_to_remove:
for batch in batches_to_remove:
self._batches.remove(batch)
return best_batch
def __str__(self):
return '%s %s (%d tasks, %d batches; active task: %s)' % \
(type(self), repr(self.name), len(self._tasks), len(self._batches), str(self.active_task))
def __repr__(self):
return self.__str__()
def dump(self, indent=0):
debug.write(debug.str(self), indent)
if self._tasks:
debug.write('Task queue:', indent + 1)
for task in self._tasks:
task.dump(indent + 2)
else:
debug.write('No tasks in task queue.', indent + 1)
if self._batches:
debug.write('Batches:', indent + 1)
for batch in self._batches:
batch.dump(indent + 2)
def try_time_based_dump(self, last_task=None):
current_time = time.time()
if (current_time - self._last_dump_time) < _debug_options.SCHEDULER_STATE_DUMP_INTERVAL:
return
self._last_dump_time = current_time
debug.write('\n--- Scheduler state dump: --------------------------------------------')
try:
self.dump()
if last_task is not None:
debug.write('Last task: %s' % debug.str(last_task), 1)
finally:
debug.write('----------------------------------------------------------------------\n')
stdout.flush()
stderr.flush()
class LocalTaskSchedulerState(threading.local):
def __init__(self):
self.last_id = 0
self.reset()
def reset(self):
self.current = TaskScheduler()
_state = LocalTaskSchedulerState()
globals()['_state'] = _state
def get_scheduler():
global _state
return _state.current
def reset():
_state.reset()
def get_active_task():
global _state
s = _state.current
return None if s is None else s.active_task
| apache-2.0 | 5,553,038,434,739,204,000 | 35.255892 | 118 | 0.571787 | false | 4.29004 | false | false | false |
joaormatos/anaconda | mmfparser/player/shaders/monochrome.py | 1 | 1427 | # Copyright (c) Mathias Kaerlev 2012.
# This file is part of Anaconda.
# Anaconda is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Anaconda is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Anaconda. If not, see <http://www.gnu.org/licenses/>.
from mmfparser.player.shader import Shader
MONOCHROME_SHADER = [
Shader(["""
varying vec2 texture_coordinate;
void main()
{
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
gl_FrontColor = gl_Color;
texture_coordinate = vec2(gl_MultiTexCoord0);
}
"""], ["""
varying vec2 texture_coordinate;
uniform sampler2D Tex0;
void main()
{
vec4 col = texture2D(Tex0, texture_coordinate);
col *= vec4(0.299,0.587,0.114,1.0);
col.rgb = vec3(col.r+col.g+col.b);
gl_FragColor = col * gl_Color;
}
"""])
]
__all__ = ['MONOCHROME_SHADER'] | gpl-3.0 | 113,449,716,348,568,350 | 33 | 71 | 0.631395 | false | 3.735602 | false | false | false |
jeffmurphy/cif-db | src/cif-registry.py | 1 | 2615 | #!/usr/bin/python
import sys
import zmq
import random
import time
import os
import datetime
import json
import getopt
import socket
import happybase
import hashlib
import struct
import traceback
import re
# adjust to match your $PREFIX if you specified one
# default PREFIX = /usr/local
sys.path.append('/usr/local/lib/cif-protocol/pb-python/gen-py')
import msg_pb2
import feed_pb2
import control_pb2
import RFC5070_IODEF_v1_pb2
import MAEC_v2_pb2
import cifsupport
from DB.Registry import Registry
def HBConnection(hbhost):
pool = happybase.ConnectionPool(size=25, host=hbhost)
return pool
def usage():
print "cif-registry.py [-D #] [-h] [-d key] [-k key] [-t int|float|double|long|str] [-v value]\n\n"
def cast(t, v):
if t in ["int"]:
return int(v)
if t in ["long"]:
return long(v)
if t in ["double", "float"]:
return float(v)
return str(v)
try:
opts, args = getopt.getopt(sys.argv[1:], 't:v:k:d:D:H:h')
debug = 0
key_type = None
key_value = None
key_name = None
del_name = None
hbhost = "localhost"
for o, a in opts:
if o == "-t":
key_type = a
elif o == "-H":
hbhost = a
elif o == "-v":
key_value = a
elif o == "-k":
key_name = a
elif o == "-d":
del_name = a
elif o == "-h":
usage()
sys.exit(2)
elif o == "-D":
debug = a
connectionPool = HBConnection(hbhost)
reg = Registry(connectionPool, debug)
if del_name != None:
reg.delete(del_name)
kv = reg.get(del_name)
if kv != None:
print "Failed to delete the key: it seems to still be in the database."
elif key_name != None:
if key_type != None and key_value != None:
key_value = cast(key_type, key_value)
reg.set(key_name, key_value)
kv = reg.get(key_name)
if kv == key_value:
print key_name + " has been set to " + str(key_value)
else:
print "Failed? you gave me: " + str(key_value) + " but the database has " + str(kv)
else:
kv = reg.get(key_name)
print key_name + " = " + str(kv) + " " + str(type(kv))
else:
kl = reg.get()
print "Available keys: ", kl
except getopt.GetoptError as err:
print str(err)
usage()
sys.exit(2)
except Exception as e:
print e
traceback.print_tb(sys.exc_info()[2])
usage()
sys.exit(2)
| bsd-3-clause | -672,101,877,896,776,800 | 21.543103 | 103 | 0.540344 | false | 3.374194 | false | false | false |
cfrs7xx/OLD | check.py | 1 | 3089 | __author__ = 'tschlein'
#Checks for existence of dependencies (executable files, Python modules).
#Returns True if all dependencies are present; otherwise, returns False and prints missing files.
#TODO add Python modules
import sys
from os import path
import platform #https://docs.python.org/2/library/platform.html
import configparser #https://docs.python.org/2/library/configparser.html
import argparse #http://docs.python.org/3.4/library/argparse.html
#Parse the ini file to check whether dependencies are present.
def parse(file, verbose):
if verbose >= 1:
print('Entering parse:')
if verbose >= 2:
print('\tConfig file passed in:' + str(file))
#Declare list of missing executables and/or modules.
missing = '|[-] Missing the following:'
#Determine system: 'nt' for Windows, 'posix' for *nix, Mac OSX
system = platform.platform()
#Determine 32-bit vs. 64-bit architecture
if platform.architecture()[0] == '64bit':
architecture = 64
elif platform.architecture()[0] == '32bit':
architecture = 32
#Read the config file for parsing
config = configparser.ConfigParser()
config.read(file)
if 'Windows' in system:
for key in config['Windows']:
value = config.get('Windows', key)
if path.isfile(value):
if verbose >= 2:
print('\t| [+] ', value, '|')
else:
print('\t| [-] ', value, '|')
missing += '\n| [-]: '
missing += value
elif 'Linux' in system:
for key in config['Linux']:
value = config.get('Linux', key)
if path.isfile(value):
if verbose >= 2:
print('\t [+] ', value, '|')
else:
print('\t| [-] ', value, '|')
missing += '\n| [-]: '
missing += value
    #Return (True, None) if all dependencies are present; otherwise, return (False, missing).
    if missing != '|[-] Missing the following:':
        return False, missing
    else:
        return True, None
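#Example paths.ini layout (illustrative only; section keys and paths are placeholders):
#
# [Windows]
# python = C:\Python27\python.exe
#
# [Linux]
# python = /usr/bin/python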
#Parse the command line arguments.
def main(argv):
try:
global debug
verbose = 0
file = './paths.ini'
parser = argparse.ArgumentParser(description="Check whether required programs and modules exist.",
add_help=True)
parser.add_argument('-f', '--file', help='The file that contains paths for the required programs and modules.', required=False)
parser.add_argument('-v', '--verbose', help='The level of debugging.', type=int, required=False)
parser.add_argument('--version', action='version', version='%(prog)s 0.5')
args = parser.parse_args()
if args.file:
file = args.file
if args.verbose:
verbose = args.verbose
if verbose >= 1:
print('Entering Main:')
value, error = parse(file, verbose)
return value, error
except IOError:
sys.exit('Error: File ' + str(file) + ' does not exist.')
main(sys.argv[1:]) | bsd-3-clause | -415,552,071,004,011,800 | 31.526316 | 135 | 0.57462 | false | 4.248968 | true | false | false |
bertrik/panopt | panopt.py | 1 | 4862 | #!/usr/bin/env python
# Shows several JPG camera snapshots tiled into a bigger image.
#
# requires python2, ppython-sdl2 !
#
import os
import time
import sys
import ctypes
import urllib2
import json
from sdl2 import *
from sdl2.sdlimage import *
from sdl2.sdlttf import *
class Camera(object):
def __init__(self, x, y, scale, label, url):
self.x = x
self.y = y
self.scale = scale
self.label = label
self.url = url
def __repr__(self):
return json.dumps(self.__dict__, sort_keys=True, indent=4)
# reads one image from the source
def readframe(url):
try:
response = urllib2.urlopen(url)
return response.read()
except Exception:
return None
# returns a surface with the text rendered in white with a black outline in the specified size
def renderText(text, size):
font = TTF_OpenFont("VCR_OSD_MONO_1.001.ttf", size)
TTF_SetFontOutline(font, 2)
outline = TTF_RenderText_Blended(font, text, SDL_Color(0, 0, 0))
TTF_SetFontOutline(font, 0)
surface = TTF_RenderText_Blended(font, text, SDL_Color(255, 255, 255))
TTF_CloseFont(font)
SDL_BlitSurface(surface, None, outline, SDL_Rect(2, 2, 0, 0))
return outline
# renders one camera onto the window
def renderCamera(window, camera):
# get window properties
cw = ctypes.c_int()
ch = ctypes.c_int()
SDL_GetWindowSize(window, ctypes.byref(cw), ctypes.byref(ch))
w, h = cw.value, ch.value
# get JPG
jpeg = readframe(camera.url)
if jpeg is None:
return
    rwops = SDL_RWFromMem(jpeg, len(jpeg))  # pass the buffer length, not the Python object size
image = IMG_LoadTyped_RW(rwops, True, "JPG")
# blit scaled JPG
x = w * camera.x / 4
y = h * camera.y / 3
rect = SDL_Rect(x, y, w * camera.scale / 4, h * camera.scale / 3)
SDL_BlitScaled(image, None, SDL_GetWindowSurface(window), rect)
SDL_FreeSurface(image)
# draw text over it
SDL_BlitSurface(camera.osd, None, SDL_GetWindowSurface(window), rect)
SDL_UpdateWindowSurface(window)
def getDefaultLayout():
cameras = list()
cameras.append(Camera(0, 0, 1, "ACHTERDEUR", "http://localhost:8000/cgi-bin/nph-mjgrab?7"))
cameras.append(Camera(0, 1, 1, "WERKPLAATS", "http://localhost:8000/cgi-bin/nph-mjgrab?6"))
cameras.append(Camera(0, 2, 1, "KEUKEN", "http://localhost:8000/cgi-bin/nph-mjgrab?2"))
cameras.append(Camera(1, 0, 2, "SPACE", "http://localhost:8000/cgi-bin/nph-mjgrab?4"))
cameras.append(Camera(1, 2, 1, "SPACE", "http://localhost:8000/cgi-bin/nph-mjgrab?3"))
cameras.append(Camera(2, 2, 1, "3D-PRINTER", "http://localhost:8000/cgi-bin/nph-mjgrab?1"))
cameras.append(Camera(3, 0, 1, "PARKEER", "http://localhost:8000/cgi-bin/nph-mjgrab?9"))
cameras.append(Camera(3, 1, 1, "PARKEER", "http://localhost:8000/cgi-bin/nph-mjgrab?8"))
cameras.append(Camera(3, 2, 1, "INRIT", "http://localhost:8000/cgi-bin/nph-mjgrab?10"))
return cameras
# try to read layout file, or create new default if it does not exist
def readLayout(filename):
cameras = list()
try:
with open(filename, "r") as file:
dicts = json.loads(file.read())
for d in dicts:
camera = Camera(d["x"], d["y"], d["scale"], d["label"], d["url"])
cameras.append(camera)
except Exception as e:
print("Failed to read, using defaults")
cameras = getDefaultLayout()
if not os.path.exists(filename):
with open(filename, "w") as file:
file.write(repr(cameras))
return cameras
def main():
SDL_Init(SDL_INIT_VIDEO)
TTF_Init()
window = SDL_CreateWindow(b"Panopticon",
SDL_WINDOWPOS_CENTERED,
SDL_WINDOWPOS_CENTERED,
1024,
768,
SDL_WINDOW_SHOWN)
SDL_SetWindowFullscreen(window, SDL_WINDOW_FULLSCREEN_DESKTOP)
cameras = readLayout("layout.txt")
event = SDL_Event()
iterations = 0
starttime = time.time()
lasttime = starttime
# prerender OSD
for camera in cameras:
camera.osd = renderText(camera.label, 30)
running = True
while running:
for camera in cameras:
if not running:
break
# draw one cam
renderCamera(window, camera)
# check for quit button
while SDL_PollEvent(ctypes.byref(event)):
if event.type == SDL_QUIT:
running = False
# calculate fps
iterations = iterations + 1
delta_t = (time.time() - starttime)
if delta_t > 0:
fps = iterations / delta_t
print(fps)
SDL_DestroyWindow(window)
SDL_Quit()
return 0
if __name__ == "__main__":
sys.exit(main())
| mit | -8,617,533,765,445,592,000 | 29.3875 | 95 | 0.602633 | false | 3.285135 | false | false | false |
rdevon/cortex | cortex/_lib/utils.py | 1 | 2635 | '''Utility methods
'''
import logging
import os
import numpy as np
import torch
__author__ = 'R Devon Hjelm'
__author_email__ = 'erroneus@gmail.com'
logger = logging.getLogger('cortex.util')
try:
_, _columns = os.popen('stty size', 'r').read().split()
_columns = int(_columns)
except ValueError:
_columns = 1
def print_section(s):
'''For printing sections to scripts nicely.
Args:
s (str): string of section
'''
h = s + ('-' * (_columns - len(s)))
print(h)
def update_dict_of_lists(d_to_update, **d):
'''Updates a dict of list with kwargs.
Args:
d_to_update (dict): dictionary of lists.
**d: keyword arguments to append.
'''
for k, v in d.items():
if isinstance(v, dict):
if k not in d_to_update.keys():
d_to_update[k] = {}
update_dict_of_lists(d_to_update[k], **v)
elif k in d_to_update.keys():
d_to_update[k].append(v)
else:
d_to_update[k] = [v]
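# Small illustrative example (not in the original file):
#
#   d = {}
#   update_dict_of_lists(d, loss=0.5, metrics=dict(acc=0.90))
#   update_dict_of_lists(d, loss=0.4, metrics=dict(acc=0.92))
#   # d == {'loss': [0.5, 0.4], 'metrics': {'acc': [0.90, 0.92]}}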
def bad_values(d):
failed = {}
for k, v in d.items():
if isinstance(v, dict):
v_ = bad_values(v)
if v_:
failed[k] = v_
else:
if isinstance(v, (list, tuple)):
v_ = []
for v__ in v:
if isinstance(v__, torch.Tensor):
v_.append(v__.item())
else:
v_.append(v__)
v_ = np.array(v_).sum()
elif isinstance(v, torch.Tensor):
v_ = v.item()
else:
v_ = v
if np.isnan(v_) or np.isinf(v_):
failed[k] = v_
if len(failed) == 0:
return False
return failed
def convert_to_numpy(o):
if isinstance(o, torch.Tensor):
o = o.data.cpu().numpy()
if len(o.shape) == 1 and o.shape[0] == 1:
o = o[0]
elif isinstance(o, (torch.cuda.FloatTensor, torch.cuda.LongTensor)):
o = o.cpu().numpy()
elif isinstance(o, list):
for i in range(len(o)):
o[i] = convert_to_numpy(o[i])
elif isinstance(o, tuple):
o_ = tuple()
for i in range(len(o)):
o_ = o_ + (convert_to_numpy(o[i]),)
o = o_
elif isinstance(o, dict):
for k in o.keys():
o[k] = convert_to_numpy(o[k])
return o
def compute_tsne(X, perplexity=40, n_iter=300, init='pca'):
from sklearn.manifold import TSNE
tsne = TSNE(2, perplexity=perplexity, n_iter=n_iter, init=init)
points = X.tolist()
return tsne.fit_transform(points)
| bsd-3-clause | -2,803,318,686,245,266,000 | 24.095238 | 72 | 0.494118 | false | 3.32702 | false | false | false |
xapple/plumbing | plumbing/scraping/__init__.py | 1 | 5761 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Written by Lucas Sinclair.
MIT Licensed.
Contact at www.sinclair.bio
"""
# Built-in modules #
import os
# Internal modules #
from plumbing.scraping.headers import make_headers
# First party modules #
import autopaths
from autopaths import Path
# Third party modules #
import requests
from retry import retry
###############################################################################
@retry(requests.exceptions.HTTPError, tries=8, delay=1, backoff=2)
def request(url,
header = None,
text = False,
content = False,
response = False,
**kwargs):
# Get #
resp = requests.get(url, headers=header, **kwargs)
# This will be caught by the decorator #
resp.raise_for_status()
# Pick what to return #
if text: return resp.text
if content: return resp.content
if response: return resp
###############################################################################
def retrieve_from_url(url, user_agent=None, **kwargs):
"""
Return the text content of a resource (e.g. the HTML).
By default we will retry if an HTTP error arises.
"""
# Custom user agent if needed #
if user_agent is not None: header = make_headers(user_agent)
else: header = ""
# Download #
content = request(url, header, text=True, **kwargs)
# Return #
return content
###############################################################################
def stream_from_url(*args, **kwargs):
"""
Save the resource as a file on disk iteratively by first asking
for the 'content-length' header entry and downloading in chunks.
By default we will retry if an HTTP error arises.
By default we will uncompress a downloaded file if it is zipped.
"""
# Just redirect to download_from_url #
    kwargs.update({'stream': True})
return download_from_url(*args, **kwargs)
def download_from_url(url,
destination = None,
uncompress = False,
user_agent = 1,
stream = False,
progress = False,
desc = None,
**kwargs):
"""
Save the resource as a file on disk.
"""
# Custom user agent if needed #
if user_agent is not None: header = make_headers(user_agent)
else: header = ""
# Download #
if stream: response = request(url, header, response=True, stream=True, **kwargs)
else: content = request(url, header, content=True, **kwargs)
# Get total size #
if stream:
total_size = int(response.headers.get('content-length', -1))
num_blocks = 1500
block_size = int(total_size/num_blocks)
# Sometimes we don't get content-length #
if stream and total_size < 0:
return download_from_url(url, destination, uncompress, user_agent,
False, False, **kwargs)
# Choose the right option for destination #
destination = handle_destination(url, destination)
# How the progress bar should look like #
bar = '{l_bar}{bar}| {n_fmt}/{total_fmt} [{remaining}, ' \
'{rate_fmt}{postfix}]'
# Delete the destination if there is any exception or a ctrl-c #
try:
# With streaming #
if stream:
generator = response.iter_content(chunk_size=block_size)
if progress:
# In the future replace with `from tqdm.rich import tqdm`
from tqdm import tqdm
bar = tqdm(bar_format = bar,
desc = desc,
total = total_size,
unit = 'iB',
unit_scale = True)
with open(destination, "wb") as handle:
for data in generator:
handle.write(data)
bar.update(len(data))
bar.close()
else:
with open(destination, "wb") as handle:
for data in generator: handle.write(data)
# Without streaming #
if not stream:
with open(destination, "wb") as handle: handle.write(content)
except:
if os.path.exists(destination): os.remove(destination)
raise
# Uncompress the result #
if uncompress:
# To detect tar.gz we rely on the extension #
if destination.endswith('.tar.gz'):
return destination.untargz_to()
# Otherwise read the magic number #
with open(destination, 'rb') as f:
header = f.read(4)
# If it's a zip file #
if header == b"PK\x03\x04":
return destination.unzip_to(inplace=True)
# If it's a gzip file #
elif header[:3] == b"\x1f\x8b\x08":
return destination.ungzip_to()
# Return #
return destination
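# Illustrative usage (not part of the original module; the URL and paths are
# made-up examples):
#
#   html = retrieve_from_url("https://example.com/index.html")
#   path = download_from_url("https://example.com/data.tar.gz",
#                            destination="/tmp/downloads/",
#                            uncompress=True)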
###############################################################################
def handle_destination(url, destination):
"""
The destination can be either unspecified or can contain either a file path
or a directory path.
"""
# Choose a default for destination #
if destination is None:
destination = autopaths.tmp_path.new_temp_file()
# Directory case - choose a filename #
elif destination.endswith('/'):
filename = url.split("/")[-1].split("?")[0]
destination = Path(destination + filename)
destination.directory.create_if_not_exists()
# Normal case #
else:
destination = Path(destination)
destination.directory.create_if_not_exists()
# Return #
return destination
| mit | -4,436,883,466,183,395,300 | 34.561728 | 84 | 0.541399 | false | 4.354497 | false | false | false |
bharathramh92/shop | seller/ranking.py | 1 | 1698 | from django.db.models import Q, Sum, Count
from store_db.models import SellerFeedback, Inventory, SellerRating
def rank_seller_from_inventory(inventory):
# call this method if inventory is added/updated or new feedback is received
# based on seller_points, return_accepted, local_pickup, free_domestic_shipping
rank_seller(inventory.seller)
total_points = inventory.seller.seller_rank.points
if inventory.return_accepted:
total_points += 10
if inventory.local_pick_up_accepted:
total_points += 10
if inventory.free_domestic_shipping:
total_points += 100
inventory.rating = total_points
print(total_points)
inventory.save()
def rank_seller(seller):
# call this method if feedback is created/edited or if new sales are made.
# maximum review points is 5
total_sales_made = Inventory.objects.filter(seller=seller).aggregate(tsm=Sum('total_sold'))
total_sales_made= total_sales_made['tsm']
sfd = SellerFeedback.objects.filter(seller=seller).aggregate(total_review_points=Sum('review_points'),
number_of_reviews=Count('review_points'))
if sfd['total_review_points'] is None:
sfd['total_review_points'] = 0
if total_sales_made == 0:
seller_rating_points = 100
else:
        seller_rating_points = (total_sales_made - sfd['number_of_reviews'])*5 + sfd['total_review_points']/total_sales_made
SellerRating.objects.update_or_create(seller=seller, defaults={'points': seller_rating_points})
def get_ranked_inventory_list(item):
inventories = item.item_inventory.all()
return inventories.order_by('-rating')
| mit | -2,449,228,980,046,992,000 | 41.45 | 129 | 0.684923 | false | 3.552301 | false | false | false |
ipapusha/amnet | tests/test_util.py | 1 | 5635 | import numpy as np
import amnet
import amnet.util
import z3
import sys
import unittest
class TestUtil(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
def test_maxN_z3(self):
x = z3.Real('x')
y = z3.Real('y')
z = z3.Real('z')
w1 = amnet.util.maxN_z3([x])
w2 = amnet.util.maxN_z3([x, y])
w3 = amnet.util.maxN_z3([x, y, z])
s = z3.Solver()
s.push()
s.add(x == -3)
s.add(y == 2)
s.add(z == 12)
self.assertTrue(s.check() == z3.sat)
# extract the output
model = s.model()
self.assertTrue(amnet.util.mfp(model, x) == -3)
self.assertTrue(amnet.util.mfp(model, y) == 2)
self.assertTrue(amnet.util.mfp(model, z) == 12)
self.assertTrue(amnet.util.mfp(model, w1) == -3)
self.assertTrue(amnet.util.mfp(model, w2) == 2)
self.assertTrue(amnet.util.mfp(model, w3) == 12)
s.pop()
def test_minN_z3(self):
x = z3.Real('x')
y = z3.Real('y')
z = z3.Real('z')
w1 = amnet.util.minN_z3([x])
w2 = amnet.util.minN_z3([x, y])
w3 = amnet.util.minN_z3([x, y, z])
s = z3.Solver()
s.push()
s.add(x == -3)
s.add(y == 2)
s.add(z == 12)
self.assertTrue(s.check() == z3.sat)
# extract the output
model = s.model()
self.assertTrue(amnet.util.mfp(model, x) == -3)
self.assertTrue(amnet.util.mfp(model, y) == 2)
self.assertTrue(amnet.util.mfp(model, z) == 12)
self.assertTrue(amnet.util.mfp(model, w1) == -3)
self.assertTrue(amnet.util.mfp(model, w2) == -3)
self.assertTrue(amnet.util.mfp(model, w3) == -3)
s.pop()
def test_abs_z3(self):
x = z3.Real('x')
y = z3.Real('y')
s = z3.Solver()
s.push()
s.add(x == -3)
s.add(y == amnet.util.abs_z3(x))
self.assertTrue(s.check() == z3.sat)
# extract the output
model = s.model()
self.assertTrue(amnet.util.mfp(model, x) == -3)
self.assertTrue(amnet.util.mfp(model, y) == 3)
s.pop()
s.push()
s.add(x == 4)
s.add(y == amnet.util.abs_z3(x))
self.assertTrue(s.check() == z3.sat)
# extract the output
model = s.model()
self.assertTrue(amnet.util.mfp(model, x) == 4)
self.assertTrue(amnet.util.mfp(model, y) == 4)
s.pop()
def test_norm1_z3(self):
x = z3.RealVector(prefix='x', sz=3)
y = z3.Real('y')
s = z3.Solver()
s.add(y == amnet.util.norm1_z3(x))
s.push()
s.add([x[0] == 1, x[1] == 12, x[2] == -2])
self.assertTrue(s.check() == z3.sat)
model = s.model()
self.assertTrue(amnet.util.mfp(model, y) == abs(1) + abs(12) + abs(-2))
s.pop()
s.push()
s.add([x[0] == -1, x[1] == 0, x[2] == 0])
self.assertTrue(s.check() == z3.sat)
model = s.model()
self.assertTrue(amnet.util.mfp(model, y) == abs(-1) + abs(0) + abs(0))
s.pop()
def test_norminf_z3(self):
x = z3.RealVector(prefix='x', sz=3)
y = z3.Real('y')
s = z3.Solver()
s.add(y == amnet.util.norminf_z3(x))
s.push()
s.add([x[0] == 1, x[1] == 12, x[2] == -2])
self.assertTrue(s.check() == z3.sat)
model = s.model()
self.assertTrue(amnet.util.mfp(model, y) == 12)
s.pop()
s.push()
s.add([x[0] == -1, x[1] == 0, x[2] == 0])
self.assertTrue(s.check() == z3.sat)
model = s.model()
self.assertTrue(amnet.util.mfp(model, y) == 1)
s.pop()
s.push()
s.add([x[0] == -1, x[1] == -11, x[2] == 0])
self.assertTrue(s.check() == z3.sat)
model = s.model()
self.assertTrue(amnet.util.mfp(model, y) == 11)
s.pop()
def test_gaxpy_z3(self):
m = 2
n = 3
A = np.array([[1, 2, -3], [4, -5, 6]])
x = z3.RealVector(prefix='x', sz=n)
y = np.array([7, -8])
w0 = z3.RealVector(prefix='w0', sz=m)
w1 = z3.RealVector(prefix='w1', sz=m)
w0v = amnet.util.gaxpy_z3(A, x)
w1v = amnet.util.gaxpy_z3(A, x, y)
self.assertEqual(len(w0), m)
self.assertEqual(len(w1), m)
self.assertEqual(len(w0v), m)
self.assertEqual(len(w1v), m)
s = z3.Solver()
s.add([w0[i] == w0v[i]
for i in range(m)])
s.add([w1[i] == w1v[i]
for i in range(m)])
s.push()
xc = np.array([1, 2, 3])
s.add([x[i] == xc[i] for i in range(n)])
w0_true = np.dot(A, xc)
w1_true = np.dot(A, xc) + y
self.assertTrue(s.check() == z3.sat)
model = s.model()
for i in range(m):
self.assertEqual(amnet.util.mfp(model, w0[i]), w0_true[i])
self.assertEqual(amnet.util.mfp(model, w1[i]), w1_true[i])
s.pop()
s.push()
xc = np.array([1, 0, -3])
s.add([x[i] == xc[i] for i in range(n)])
w0_true = np.dot(A, xc)
w1_true = np.dot(A, xc) + y
self.assertTrue(s.check() == z3.sat)
model = s.model()
for i in range(m):
self.assertEqual(amnet.util.mfp(model, w0[i]), w0_true[i])
self.assertEqual(amnet.util.mfp(model, w1[i]), w1_true[i])
s.pop()
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestUtil)
result = unittest.TextTestRunner(verbosity=2).run(suite)
sys.exit(not result.wasSuccessful())
| bsd-3-clause | 3,380,758,134,173,479,400 | 27.459596 | 79 | 0.488199 | false | 2.711742 | true | false | false |
quatanium/WebSocket-for-Python | example/droid_sensor_cherrypy_server.py | 8 | 2948 | # -*- coding: utf-8 -*-
import os.path
import cherrypy
from ws4py.server.cherrypyserver import WebSocketPlugin, WebSocketTool
from ws4py.websocket import WebSocket
class BroadcastWebSocketHandler(WebSocket):
def received_message(self, m):
cherrypy.engine.publish('websocket-broadcast', str(m))
class Root(object):
@cherrypy.expose
def display(self):
return """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<title>WebSocket example displaying Android device sensors</title>
<link rel="stylesheet" href="/css/style.css" type="text/css" />
<script type='application/javascript' src='http://code.jquery.com/jquery-1.9.1.min.js'></script>
<script type="application/javascript" src="http://calebevans.me/projects/jcanvas/resources/jcanvas/jcanvas.min.js"> </script>
<script type="application/javascript" src="/js/droidsensor.js"> </script>
<script type="application/javascript">
$(document).ready(function() {
initWebSocket();
drawAll();
});
</script>
</head>
<body>
<section id="content" class="body">
<canvas id="canvas" width="900" height="620"></canvas>
</section>
</body>
</html>
"""
@cherrypy.expose
def ws(self):
cherrypy.log("Handler created: %s" % repr(cherrypy.request.ws_handler))
@cherrypy.expose
def index(self):
return """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<title>WebSocket example displaying Android device sensors</title>
<script type='application/javascript' src='http://code.jquery.com/jquery-1.9.1.min.js'></script>
<script type="application/javascript" src="/js/droidsensor.js"> </script>
<script type="application/javascript">
$(document).ready(function() {
initWebSocketAndSensors();
});
</script>
</head>
<body>
</body>
</html>
"""
if __name__ == '__main__':
cherrypy.config.update({
'server.socket_host': '0.0.0.0',
'server.socket_port': 9000,
'tools.staticdir.root': os.path.abspath(os.path.join(os.path.dirname(__file__), 'static'))
}
)
print os.path.abspath(os.path.join(__file__, 'static'))
WebSocketPlugin(cherrypy.engine).subscribe()
cherrypy.tools.websocket = WebSocketTool()
cherrypy.quickstart(Root(), '', config={
'/js': {
'tools.staticdir.on': True,
'tools.staticdir.dir': 'js'
},
'/css': {
'tools.staticdir.on': True,
'tools.staticdir.dir': 'css'
},
'/images': {
'tools.staticdir.on': True,
'tools.staticdir.dir': 'images'
},
'/ws': {
'tools.websocket.on': True,
'tools.websocket.handler_cls': BroadcastWebSocketHandler
}
}
)
| bsd-3-clause | -6,804,676,188,090,151,000 | 30.361702 | 131 | 0.582768 | false | 3.760204 | false | false | false |
ZhangXiaoyu-Chief/sandwich | api/views/datacenter.py | 1 | 3535 | from api.libs.base import CoreView
from cmdb.models import DataCenter
from django.contrib.auth.models import User
from account.models import UserProfile
from django.db.utils import IntegrityError
class DataCenterView(CoreView):
"""
    Data center view class.
"""
login_required_action = ["get_list", "post_create", "post_delete", "post_change"]
superuser_required_action = ["post_create", "post_delete", "post_change"]
def get_list(self):
per_page = self.parameters("per_page")
if per_page:
datacenter_objs = self.page_split(DataCenter.objects.all())
else:
datacenter_objs = DataCenter.objects.all()
datacenter_list = []
for datacenter_obj in datacenter_objs:
datacenter_list.append(datacenter_obj.get_info())
self.response_data['data'] = datacenter_list
def post_create(self):
try:
name = self.parameters("name")
contact = self.parameters("contact")
memo = self.parameters("memo")
address = self.parameters("address")
admin_id = int(self.parameters("admin"))
admin_obj = UserProfile.objects.filter(id=admin_id).first()
if admin_obj and admin_obj.user:
new_datacenter_obj = DataCenter(name=name, contact=contact, memo=memo, admin=admin_obj.user, address=address)
else:
new_datacenter_obj = DataCenter(name=name, contact=contact, memo=memo, address=address)
new_datacenter_obj.save()
self.response_data['data'] = new_datacenter_obj.get_info()
except IntegrityError:
self.response_data['status'] = False
self.status_code = 416
except Exception:
self.response_data['status'] = False
self.status_code = 500
def post_delete(self):
datacenter_id = self.parameters("id")
try:
datacenter_obj = DataCenter.objects.filter(id=datacenter_id).first()
if datacenter_obj:
datacenter_obj.delete()
else:
self.response_data['status'] = False
self.status_code = 404
except Exception as e:
self.response_data['status'] = False
self.status_code = 500
def post_change(self):
datacenter_id = self.parameters("id")
name = self.parameters("name")
admin_id = self.parameters("admin_id")
contact = self.parameters("contact")
memo = self.parameters("memo")
address = self.parameters("address")
try:
datacenter_obj = DataCenter.objects.filter(id=datacenter_id).first()
if datacenter_obj:
datacenter_obj.name = name
admin_obj = UserProfile.objects.filter(id=admin_id).first()
datacenter_obj.admin = admin_obj.user if admin_obj and hasattr(admin_obj, "user") else None
datacenter_obj.contact = contact
datacenter_obj.memo = memo
datacenter_obj.address = address
datacenter_obj.save()
self.response_data['data'] = datacenter_obj.get_info()
else:
self.response_data['status'] = False
self.status_code = 404
except IntegrityError:
self.response_data['status'] = False
self.status_code = 416
except Exception as e:
self.response_data['status'] = False
self.status_code = 500
| apache-2.0 | 2,748,276,854,288,054,000 | 39.011364 | 125 | 0.587617 | false | 4.237064 | false | false | false |
weegreenblobbie/pith-tool | setup.py | 1 | 1380 | """
pith tool
A simple command-line tool to execute Python while taking care of the PYTHONPATH.
See https://github.com/weegreenblobbie/pith-tool
"""
from setuptools import setup, Extension
# pypi.rst processing
with open("pypi.rst") as fd:
readme_rst = fd.read()
keywords = '''
development
command-line
script
'''.split()
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Topic :: Software Development",
"Topic :: Utilities",
]
description = """\
A simple command-line tool to execute Python while taking care of the
PYTHONPATH.
"""
from pith import __version__ as pith_version
setup(
author = "Nick Hilton et al",
author_email = "weegreenblobbie@yahoo.com",
classifiers = classifiers,
description = description,
keywords = keywords,
long_description = readme_rst,
name = "pith",
#~ py_modules = [],
scripts = ['pith'],
url = "https://github.com/weegreenblobbie/pith-tool",
version = pith_version,
) | mit | 3,382,690,777,753,286,700 | 24.109091 | 81 | 0.618116 | false | 3.887324 | false | false | false |
googleapis/gapic-generator-python | tests/system/test_grpc_lro.py | 1 | 1662 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import distutils.util
import os
import pytest
from datetime import datetime, timedelta, timezone
from google import showcase
def test_lro(echo):
future = echo.wait({
'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1),
'success': {
'content': 'The hail in Wales falls mainly on the snails...eventually.'
}}
)
response = future.result()
assert isinstance(response, showcase.WaitResponse)
assert response.content.endswith('the snails...eventually.')
if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")):
@pytest.mark.asyncio
async def test_lro_async(async_echo):
future = await async_echo.wait({
'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1),
'success': {
'content': 'The hail in Wales falls mainly on the snails...eventually.'
}}
)
response = await future.result()
assert isinstance(response, showcase.WaitResponse)
assert response.content.endswith('the snails...eventually.')
| apache-2.0 | -8,864,799,227,039,772,000 | 34.361702 | 87 | 0.685921 | false | 3.966587 | false | false | false |
iamahuman/angr | angr/project.py | 1 | 33203 | import logging
import os
import types
from io import BytesIO, IOBase
import pickle
import string
from collections import defaultdict
import archinfo
from archinfo.arch_soot import SootAddressDescriptor, ArchSoot
import cle
from .misc.ux import deprecated
l = logging.getLogger(name=__name__)
def load_shellcode(shellcode, arch, start_offset=0, load_address=0):
"""
Load a new project based on a string of raw bytecode.
:param shellcode: The data to load
:param arch: The name of the arch to use, or an archinfo class
:param start_offset: The offset into the data to start analysis (default 0)
:param load_address: The address to place the data in memory (default 0)
"""
return Project(
BytesIO(shellcode),
main_opts={
'backend': 'blob',
'arch': arch,
'entry_point': start_offset,
'base_addr': load_address,
}
)
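# Illustrative sketch (not part of the original source): loading a tiny x86
# snippet and lifting its entry block.
#
#   proj = load_shellcode(b"\x90\x90\xc3", arch="x86")
#   block = proj.factory.block(proj.entry)
#   block.pp()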
class Project:
"""
This is the main class of the angr module. It is meant to contain a set of binaries and the relationships between
them, and perform analyses on them.
:param thing: The path to the main executable object to analyze, or a CLE Loader object.
The following parameters are optional.
:param default_analysis_mode: The mode of analysis to use by default. Defaults to 'symbolic'.
:param ignore_functions: A list of function names that, when imported from shared libraries, should
never be stepped into in analysis (calls will return an unconstrained value).
:param use_sim_procedures: Whether to replace resolved dependencies for which simprocedures are
available with said simprocedures.
:param exclude_sim_procedures_func: A function that, when passed a function name, returns whether or not to wrap
it with a simprocedure.
:param exclude_sim_procedures_list: A list of functions to *not* wrap with simprocedures.
:param arch: The target architecture (auto-detected otherwise).
:param simos: a SimOS class to use for this project.
:param bool translation_cache: If True, cache translated basic blocks rather than re-translating them.
:param support_selfmodifying_code: Whether we aggressively support self-modifying code. When enabled, emulation
will try to read code from the current state instead of the original memory,
regardless of the current memory protections.
:type support_selfmodifying_code: bool
:param store_function: A function that defines how the Project should be stored. Default to pickling.
:param load_function: A function that defines how the Project should be loaded. Default to unpickling.
:param analyses_preset: The plugin preset for the analyses provider (i.e. Analyses instance).
:type analyses_preset: angr.misc.PluginPreset
:param engines_preset: The plugin preset for the engines provider (i.e. EngineHub instance).
:type engines_preset: angr.misc.PluginPreset
Any additional keyword arguments passed will be passed onto ``cle.Loader``.
:ivar analyses: The available analyses.
:type analyses: angr.analysis.Analyses
:ivar entry: The program entrypoint.
:ivar factory: Provides access to important analysis elements such as path groups and symbolic execution results.
:type factory: AngrObjectFactory
:ivar filename: The filename of the executable.
:ivar loader: The program loader.
:type loader: cle.Loader
:ivar storage: Dictionary of things that should be loaded/stored with the Project.
:type storage: defaultdict(list)
"""
def __init__(self, thing,
default_analysis_mode=None,
ignore_functions=None,
use_sim_procedures=True,
exclude_sim_procedures_func=None,
exclude_sim_procedures_list=(),
arch=None, simos=None,
load_options=None,
translation_cache=True,
support_selfmodifying_code=False,
store_function=None,
load_function=None,
analyses_preset=None,
concrete_target=None,
engines_preset=None,
**kwargs):
# Step 1: Load the binary
if load_options is None: load_options = {}
load_options.update(kwargs)
if arch is not None:
load_options.update({'arch': arch})
if isinstance(thing, cle.Loader):
if load_options:
l.warning("You provided CLE options to angr but you also provided a completed cle.Loader object!")
self.loader = thing
self.filename = self.loader.main_object.binary
elif hasattr(thing, 'read') and hasattr(thing, 'seek'):
l.info("Loading binary from stream")
self.filename = None
self.loader = cle.Loader(thing, **load_options)
elif not isinstance(thing, str) or not os.path.exists(thing) or not os.path.isfile(thing):
raise Exception("Not a valid binary file: %s" % repr(thing))
else:
# use angr's loader, provided by cle
l.info("Loading binary %s", thing)
self.filename = thing
self.loader = cle.Loader(self.filename, concrete_target=concrete_target, **load_options)
# Step 2: determine its CPU architecture, ideally falling back to CLE's guess
if isinstance(arch, str):
self.arch = archinfo.arch_from_id(arch) # may raise ArchError, let the user see this
elif isinstance(arch, archinfo.Arch):
self.arch = arch
elif arch is None:
self.arch = self.loader.main_object.arch
else:
raise ValueError("Invalid arch specification.")
# Step 3: Set some defaults and set the public and private properties
if not default_analysis_mode:
default_analysis_mode = 'symbolic'
if not ignore_functions:
ignore_functions = []
if isinstance(exclude_sim_procedures_func, types.LambdaType):
l.warning("Passing a lambda type as the exclude_sim_procedures_func argument to "
"Project causes the resulting object to be un-serializable.")
self._sim_procedures = {}
self.concrete_target = concrete_target
# It doesn't make any sense to have auto_load_libs
# if you have the concrete target, let's warn the user about this.
if self.concrete_target and load_options.get('auto_load_libs', None):
l.critical("Incompatible options selected for this project, please disable auto_load_libs if "
"you want to use a concrete target.")
raise Exception("Incompatible options for the project")
if self.concrete_target and self.arch.name not in ['X86', 'AMD64', 'ARMHF']:
l.critical("Concrete execution does not support yet the selected architecture. Aborting.")
raise Exception("Incompatible options for the project")
self._default_analysis_mode = default_analysis_mode
self._exclude_sim_procedures_func = exclude_sim_procedures_func
self._exclude_sim_procedures_list = exclude_sim_procedures_list
self._should_use_sim_procedures = use_sim_procedures
self._ignore_functions = ignore_functions
self._support_selfmodifying_code = support_selfmodifying_code
self._translation_cache = translation_cache
self._executing = False # this is a flag for the convenience API, exec() and terminate_execution() below
self._is_java_project = None
self._is_java_jni_project = None
if self._support_selfmodifying_code:
if self._translation_cache is True:
self._translation_cache = False
l.warning("Disabling IRSB translation cache because support for self-modifying code is enabled.")
self.entry = self.loader.main_object.entry
self.storage = defaultdict(list)
self.store_function = store_function or self._store
self.load_function = load_function or self._load
# Step 4: Set up the project's plugin hubs
# Step 4.1: Engines. Get the preset from the loader, from the arch, or use the default.
engines = EngineHub(self)
if engines_preset is not None:
engines.use_plugin_preset(engines_preset)
elif self.loader.main_object.engine_preset is not None:
try:
engines.use_plugin_preset(self.loader.main_object.engine_preset)
except AngrNoPluginError:
raise ValueError("The CLE loader asked to use a engine preset: %s" % \
self.loader.main_object.engine_preset)
else:
try:
engines.use_plugin_preset(self.arch.name)
except AngrNoPluginError:
engines.use_plugin_preset('default')
self.engines = engines
self.factory = AngrObjectFactory(self)
# Step 4.2: Analyses
self.analyses = AnalysesHub(self)
self.analyses.use_plugin_preset(analyses_preset if analyses_preset is not None else 'default')
# Step 4.3: ...etc
self.kb = KnowledgeBase(self)
# Step 5: determine the guest OS
if isinstance(simos, type) and issubclass(simos, SimOS):
self.simos = simos(self) #pylint:disable=invalid-name
elif isinstance(simos, str):
self.simos = os_mapping[simos](self)
elif simos is None:
self.simos = os_mapping[self.loader.main_object.os](self)
else:
raise ValueError("Invalid OS specification or non-matching architecture.")
# Step 6: Register simprocedures as appropriate for library functions
if isinstance(self.arch, ArchSoot) and self.simos.is_javavm_with_jni_support:
# If we execute a Java archive that includes native JNI libraries,
# we need to use the arch of the native simos for all (native) sim
# procedures.
sim_proc_arch = self.simos.native_arch
else:
sim_proc_arch = self.arch
for obj in self.loader.initial_load_objects:
self._register_object(obj, sim_proc_arch)
# Step 7: Run OS-specific configuration
self.simos.configure_project()
def _register_object(self, obj, sim_proc_arch):
"""
This scans through an objects imports and hooks them with simprocedures from our library whenever possible
"""
# Step 1: get the set of libraries we are allowed to use to resolve unresolved symbols
missing_libs = []
for lib_name in self.loader.missing_dependencies:
try:
missing_libs.append(SIM_LIBRARIES[lib_name])
except KeyError:
l.info("There are no simprocedures for missing library %s :(", lib_name)
# additionally provide libraries we _have_ loaded as a fallback fallback
# this helps in the case that e.g. CLE picked up a linux arm libc to satisfy an android arm binary
for lib in self.loader.all_objects:
if lib.provides in SIM_LIBRARIES:
simlib = SIM_LIBRARIES[lib.provides]
if simlib not in missing_libs:
missing_libs.append(simlib)
# Step 2: Categorize every "import" symbol in each object.
# If it's IGNORED, mark it for stubbing
# If it's blacklisted, don't process it
# If it matches a simprocedure we have, replace it
for reloc in obj.imports.values():
# Step 2.1: Quick filter on symbols we really don't care about
func = reloc.symbol
if func is None:
continue
if not func.is_function and func.type != cle.backends.symbol.SymbolType.TYPE_NONE:
continue
if not reloc.resolved:
# This is a hack, effectively to support Binary Ninja, which doesn't provide access to dependency
# library names. The backend creates the Relocation objects, but leaves them unresolved so that
# we can try to guess them here. Once the Binary Ninja API starts supplying the dependencies,
# The if/else, along with Project._guess_simprocedure() can be removed if it has no other utility,
# just leave behind the 'unresolved' debug statement from the else clause.
if reloc.owner.guess_simprocs:
l.debug("Looking for matching SimProcedure for unresolved %s from %s with hint %s",
func.name, reloc.owner, reloc.owner.guess_simprocs_hint)
self._guess_simprocedure(func, reloc.owner.guess_simprocs_hint)
else:
l.debug("Ignoring unresolved import '%s' from %s ...?", func.name, reloc.owner)
continue
export = reloc.resolvedby
if self.is_hooked(export.rebased_addr):
l.debug("Already hooked %s (%s)", export.name, export.owner)
continue
# Step 2.2: If this function has been resolved by a static dependency,
# check if we actually can and want to replace it with a SimProcedure.
# We opt out of this step if it is blacklisted by ignore_functions, which
# will cause it to be replaced by ReturnUnconstrained later.
if export.owner is not self.loader._extern_object and \
export.name not in self._ignore_functions:
if self._check_user_blacklists(export.name):
continue
owner_name = export.owner.provides
if isinstance(self.loader.main_object, cle.backends.pe.PE):
owner_name = owner_name.lower()
if owner_name not in SIM_LIBRARIES:
continue
sim_lib = SIM_LIBRARIES[owner_name]
if not sim_lib.has_implementation(export.name):
continue
l.info("Using builtin SimProcedure for %s from %s", export.name, sim_lib.name)
self.hook_symbol(export.rebased_addr, sim_lib.get(export.name, sim_proc_arch))
# Step 2.3: If 2.2 didn't work, check if the symbol wants to be resolved
# by a library we already know something about. Resolve it appropriately.
# Note that _check_user_blacklists also includes _ignore_functions.
# An important consideration is that even if we're stubbing a function out,
# we still want to try as hard as we can to figure out where it comes from
# so we can get the calling convention as close to right as possible.
elif reloc.resolvewith is not None and reloc.resolvewith in SIM_LIBRARIES:
sim_lib = SIM_LIBRARIES[reloc.resolvewith]
if self._check_user_blacklists(export.name):
if not func.is_weak:
l.info("Using stub SimProcedure for unresolved %s from %s", func.name, sim_lib.name)
self.hook_symbol(export.rebased_addr, sim_lib.get_stub(export.name, sim_proc_arch))
else:
l.info("Using builtin SimProcedure for unresolved %s from %s", export.name, sim_lib.name)
self.hook_symbol(export.rebased_addr, sim_lib.get(export.name, sim_proc_arch))
# Step 2.4: If 2.3 didn't work (the symbol didn't request a provider we know of), try
# looking through each of the SimLibraries we're using to resolve unresolved
# functions. If any of them know anything specifically about this function,
# resolve it with that. As a final fallback, just ask any old SimLibrary
# to resolve it.
elif missing_libs:
for sim_lib in missing_libs:
if sim_lib.has_metadata(export.name):
if self._check_user_blacklists(export.name):
if not func.is_weak:
l.info("Using stub SimProcedure for unresolved %s from %s", export.name, sim_lib.name)
self.hook_symbol(export.rebased_addr, sim_lib.get_stub(export.name, sim_proc_arch))
else:
l.info("Using builtin SimProcedure for unresolved %s from %s", export.name, sim_lib.name)
self.hook_symbol(export.rebased_addr, sim_lib.get(export.name, sim_proc_arch))
break
else:
if not func.is_weak:
l.info("Using stub SimProcedure for unresolved %s", export.name)
self.hook_symbol(export.rebased_addr, missing_libs[0].get(export.name, sim_proc_arch))
# Step 2.5: If 2.4 didn't work (we have NO SimLibraries to work with), just
# use the vanilla ReturnUnconstrained, assuming that this isn't a weak func
elif not func.is_weak:
l.info("Using stub SimProcedure for unresolved %s", export.name)
self.hook_symbol(export.rebased_addr, SIM_PROCEDURES['stubs']['ReturnUnconstrained'](display_name=export.name, is_stub=True))
def _guess_simprocedure(self, f, hint):
"""
        Does symbol name `f` exist as a SIM_PROCEDURE? If so, hook its address with it; otherwise do nothing.
        Narrows down the set of libraries to search based on `hint`.
        Part of the hack to enable Binary Ninja support. Remove if _register_object() stops using it.
"""
# First, filter the SIM_LIBRARIES to a reasonable subset based on the hint
hinted_libs = []
if hint == "win":
            hinted_libs = filter(lambda lib: lib.endswith(".dll"), SIM_LIBRARIES)
        else:
            hinted_libs = filter(lambda lib: ".so" in lib, SIM_LIBRARIES)
for lib in hinted_libs:
if SIM_LIBRARIES[lib].has_implementation(f.name):
l.debug("Found implementation for %s in %s", f, lib)
self.hook_symbol(f.relative_addr, (SIM_LIBRARIES[lib].get(f.name, self.arch)))
break
else:
l.debug("Could not find matching SimProcedure for %s, ignoring.", f.name)
def _check_user_blacklists(self, f):
"""
Has symbol name `f` been marked for exclusion by any of the user
parameters?
"""
return not self._should_use_sim_procedures or \
f in self._exclude_sim_procedures_list or \
f in self._ignore_functions or \
(self._exclude_sim_procedures_func is not None and self._exclude_sim_procedures_func(f))
@staticmethod
def _addr_to_str(addr):
return "%s" % repr(addr) if isinstance(addr, SootAddressDescriptor) else "%#x" % addr
#
# Public methods
# They're all related to hooking!
#
# pylint: disable=inconsistent-return-statements
def hook(self, addr, hook=None, length=0, kwargs=None, replace=False):
"""
Hook a section of code with a custom function. This is used internally to provide symbolic
summaries of library functions, and can be used to instrument execution or to modify
control flow.
When hook is not specified, it returns a function decorator that allows easy hooking.
Usage::
# Assuming proj is an instance of angr.Project, we will add a custom hook at the entry
# point of the project.
@proj.hook(proj.entry)
def my_hook(state):
print("Welcome to execution!")
:param addr: The address to hook.
:param hook: A :class:`angr.project.Hook` describing a procedure to run at the
given address. You may also pass in a SimProcedure class or a function
directly and it will be wrapped in a Hook object for you.
:param length: If you provide a function for the hook, this is the number of bytes
that will be skipped by executing the hook by default.
:param kwargs: If you provide a SimProcedure for the hook, these are the keyword
arguments that will be passed to the procedure's `run` method
eventually.
:param replace: Control the behavior on finding that the address is already hooked. If
true, silently replace the hook. If false (default), warn and do not
replace the hook. If none, warn and replace the hook.
"""
if hook is None:
# if we haven't been passed a thing to hook with, assume we're being used as a decorator
return self._hook_decorator(addr, length=length, kwargs=kwargs)
if kwargs is None: kwargs = {}
l.debug('hooking %s with %s', self._addr_to_str(addr), str(hook))
if self.is_hooked(addr):
if replace is True:
pass
elif replace is False:
l.warning("Address is already hooked, during hook(%s, %s). Not re-hooking.", self._addr_to_str(addr), hook)
return
else:
l.warning("Address is already hooked, during hook(%s, %s). Re-hooking.", self._addr_to_str(addr), hook)
if isinstance(hook, type):
            raise TypeError("Please instantiate your SimProcedure before hooking with it")
if callable(hook):
hook = SIM_PROCEDURES['stubs']['UserHook'](user_func=hook, length=length, **kwargs)
self._sim_procedures[addr] = hook
def is_hooked(self, addr):
"""
Returns True if `addr` is hooked.
:param addr: An address.
:returns: True if addr is hooked, False otherwise.
"""
return addr in self._sim_procedures
def hooked_by(self, addr):
"""
Returns the current hook for `addr`.
:param addr: An address.
:returns: None if the address is not hooked.
"""
if not self.is_hooked(addr):
l.warning("Address %s is not hooked", self._addr_to_str(addr))
return None
return self._sim_procedures[addr]
def unhook(self, addr):
"""
Remove a hook.
:param addr: The address of the hook.
"""
if not self.is_hooked(addr):
l.warning("Address %s not hooked", self._addr_to_str(addr))
return
del self._sim_procedures[addr]
def hook_symbol(self, symbol_name, simproc, kwargs=None, replace=None):
"""
Resolve a dependency in a binary. Looks up the address of the given symbol, and then hooks that
address. If the symbol was not available in the loaded libraries, this address may be provided
by the CLE externs object.
Additionally, if instead of a symbol name you provide an address, some secret functionality will
kick in and you will probably just hook that address, UNLESS you're on powerpc64 ABIv1 or some
yet-unknown scary ABI that has its function pointers point to something other than the actual
functions, in which case it'll do the right thing.
:param symbol_name: The name of the dependency to resolve.
:param simproc: The SimProcedure instance (or function) with which to hook the symbol
:param kwargs: If you provide a SimProcedure for the hook, these are the keyword
arguments that will be passed to the procedure's `run` method
eventually.
:param replace: Control the behavior on finding that the address is already hooked. If
true, silently replace the hook. If false, warn and do not replace the
hook. If none (default), warn and replace the hook.
:returns: The address of the new symbol.
:rtype: int
"""
if type(symbol_name) is not int:
sym = self.loader.find_symbol(symbol_name)
if sym is None:
# it could be a previously unresolved weak symbol..?
new_sym = None
for reloc in self.loader.find_relevant_relocations(symbol_name):
if not reloc.symbol.is_weak:
raise Exception("Symbol is strong but we couldn't find its resolution? Report to @rhelmot.")
if new_sym is None:
new_sym = self.loader.extern_object.make_extern(symbol_name)
reloc.resolve(new_sym)
reloc.relocate([])
if new_sym is None:
l.error("Could not find symbol %s", symbol_name)
return None
sym = new_sym
basic_addr = sym.rebased_addr
else:
basic_addr = symbol_name
symbol_name = None
hook_addr, _ = self.simos.prepare_function_symbol(symbol_name, basic_addr=basic_addr)
self.hook(hook_addr, simproc, kwargs=kwargs, replace=replace)
return hook_addr
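    # Illustrative usage sketch (not from the original source). The symbol and the
    # procedure chosen below are only examples and assume a libc-style target:
    #
    #   >>> proj.hook_symbol('malloc', angr.SIM_PROCEDURES['libc']['malloc']())
    #   >>> proj.is_symbol_hooked('malloc')
    #   True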
def is_symbol_hooked(self, symbol_name):
"""
Check if a symbol is already hooked.
:param str symbol_name: Name of the symbol.
:return: True if the symbol can be resolved and is hooked, False otherwise.
:rtype: bool
"""
sym = self.loader.find_symbol(symbol_name)
if sym is None:
l.warning("Could not find symbol %s", symbol_name)
return False
hook_addr, _ = self.simos.prepare_function_symbol(symbol_name, basic_addr=sym.rebased_addr)
return self.is_hooked(hook_addr)
def unhook_symbol(self, symbol_name):
"""
Remove the hook on a symbol.
This function will fail if the symbol is provided by the extern object, as that would result in a state where
analysis would be unable to cope with a call to this symbol.
"""
sym = self.loader.find_symbol(symbol_name)
if sym is None:
l.warning("Could not find symbol %s", symbol_name)
return False
if sym.owner is self.loader._extern_object:
l.warning("Refusing to unhook external symbol %s, replace it with another hook if you want to change it",
symbol_name)
return False
hook_addr, _ = self.simos.prepare_function_symbol(symbol_name, basic_addr=sym.rebased_addr)
self.unhook(hook_addr)
return True
def rehook_symbol(self, new_address, symbol_name):
"""
Move the hook for a symbol to a specific address
:param new_address: the new address that will trigger the SimProc execution
:param symbol_name: the name of the symbol (f.i. strcmp )
:return: None
"""
new_sim_procedures = {}
for key_address, simproc_obj in self._sim_procedures.items():
if simproc_obj.display_name == symbol_name:
new_sim_procedures[new_address] = simproc_obj
else:
new_sim_procedures[key_address] = simproc_obj
self._sim_procedures = new_sim_procedures
#
# A convenience API (in the style of triton and manticore) for symbolic execution.
#
def execute(self, *args, **kwargs):
"""
This function is a symbolic execution helper in the simple style
        supported by triton and manticore. It is designed to be run after
setting up hooks (see Project.hook), in which the symbolic state
can be checked.
This function can be run in three different ways:
- When run with no parameters, this function begins symbolic execution
from the entrypoint.
- It can also be run with a "state" parameter specifying a SimState to
begin symbolic execution from.
- Finally, it can accept any arbitrary keyword arguments, which are all
passed to project.factory.full_init_state.
If symbolic execution finishes, this function returns the resulting
simulation manager.
"""
if args:
state = args[0]
else:
state = self.factory.full_init_state(**kwargs)
pg = self.factory.simulation_manager(state)
self._executing = True
return pg.run(until=lambda lpg: not self._executing)
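    # Minimal usage sketch (not part of the original source). A hook installed via
    # Project.hook can call terminate_execution() to stop the run started below;
    # the hooked address is hypothetical:
    #
    #   >>> @proj.hook(0x400680, length=5)
    #   ... def stop_here(state):
    #   ...     proj.terminate_execution()
    #   >>> simgr = proj.execute()   # runs from the entry point until the hook fires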
def terminate_execution(self):
"""
Terminates a symbolic execution that was started with Project.execute().
"""
self._executing = False
#
# Private methods related to hooking
#
def _hook_decorator(self, addr, length=0, kwargs=None):
"""
Return a function decorator that allows easy hooking. Please refer to hook() for its usage.
:return: The function decorator.
"""
def hook_decorator(func):
self.hook(addr, func, length=length, kwargs=kwargs)
return func
return hook_decorator
#
# Pickling
#
def __getstate__(self):
try:
store_func, load_func = self.store_function, self.load_function
self.store_function, self.load_function = None, None
return dict(self.__dict__)
finally:
self.store_function, self.load_function = store_func, load_func
def __setstate__(self, s):
self.__dict__.update(s)
def _store(self, container):
# If container is a filename.
if isinstance(container, str):
with open(container, 'wb') as f:
try:
pickle.dump(self, f, pickle.HIGHEST_PROTOCOL)
except RuntimeError as e: # maximum recursion depth can be reached here
l.error("Unable to store Project: '%s' during pickling", e)
# If container is an open file.
elif isinstance(container, IOBase):
try:
pickle.dump(self, container, pickle.HIGHEST_PROTOCOL)
except RuntimeError as e: # maximum recursion depth can be reached here
l.error("Unable to store Project: '%s' during pickling", e)
# If container is just a variable.
else:
try:
container = pickle.dumps(self, pickle.HIGHEST_PROTOCOL)
except RuntimeError as e: # maximum recursion depth can be reached here
l.error("Unable to store Project: '%s' during pickling", e)
@staticmethod
def _load(container):
if isinstance(container, str):
# If container is a filename.
if all(c in string.printable for c in container) and os.path.exists(container):
with open(container, 'rb') as f:
return pickle.load(f)
# If container is a pickle string.
else:
return pickle.loads(container)
# If container is an open file
elif isinstance(container, IOBase):
return pickle.load(container)
# What else could it be?
else:
l.error("Cannot unpickle container of type %s", type(container))
return None
def __repr__(self):
return '<Project %s>' % (self.filename if self.filename is not None else 'loaded from stream')
#
# Properties
#
@property
def use_sim_procedures(self):
return self._should_use_sim_procedures
@property
def is_java_project(self):
"""
Indicates if the project's main binary is a Java Archive.
"""
if self._is_java_project is None:
self._is_java_project = isinstance(self.arch, ArchSoot)
return self._is_java_project
@property
def is_java_jni_project(self):
"""
Indicates if the project's main binary is a Java Archive, which
interacts during its execution with native libraries (via JNI).
"""
if self._is_java_jni_project is None:
self._is_java_jni_project = isinstance(self.arch, ArchSoot) and self.simos.is_javavm_with_jni_support
return self._is_java_jni_project
#
# Compatibility
#
@property
@deprecated(replacement='simos')
def _simos(self):
return self.simos
from .errors import AngrNoPluginError
from .factory import AngrObjectFactory
from angr.simos import SimOS, os_mapping
from .analyses.analysis import AnalysesHub
from .knowledge_base import KnowledgeBase
from .engines import EngineHub
from .procedures import SIM_PROCEDURES, SIM_LIBRARIES
| bsd-2-clause | 5,732,707,143,433,387,000 | 44.17415 | 141 | 0.604524 | false | 4.32838 | false | false | false |
fnp/wolnelektury | src/dictionary/tests.py | 1 | 1782 | # This file is part of Wolnelektury, licensed under GNU Affero GPLv3 or later.
# Copyright © Fundacja Nowoczesna Polska. See NOTICE for more information.
#
from django.core.files.base import ContentFile
from catalogue.test_utils import *
from catalogue.models import Book
class DictionaryTests(WLTestCase):
def setUp(self):
WLTestCase.setUp(self)
self.book_info = BookInfoStub(
author=PersonStub(("Jim",), "Lazy"),
kind="X-Kind",
genre="X-Genre",
epoch="X-Epoch",
**info_args("Default Book")
)
def test_book_with_footnote(self):
book_text = b"""<utwor>
<opowiadanie>
<akap><pe><slowo_obce>rose</slowo_obce> --- kind of a flower.</pe></akap>
<akap><pe><slowo_obce>rose</slowo_obce> --- kind of a flower.</pe></akap>
<akap><pe><slowo_obce>rose</slowo_obce> (techn.) --- #FF007F.</pe></akap>
</opowiadanie></utwor>
"""
book = Book.from_text_and_meta(ContentFile(book_text), self.book_info)
self.assertEqual(
len(self.client.get('/przypisy/').context['object_list']),
2,
'There should be two notes on the note list.')
self.assertEqual(
len(self.client.get('/przypisy/?ltr=a').context['object_list']),
0,
'There should not be a note for the letter A.')
self.assertEqual(
len(self.client.get('/przypisy/?ltr=r').context['object_list']),
2,
'Both notes start with the letter R.')
self.assertEqual(
len(self.client.get('/przypisy/?qual=techn.').context['object_list']),
1,
'There should be a note qualified with \'techn.\' qualifier.')
| agpl-3.0 | 2,932,056,536,525,392,400 | 34.62 | 85 | 0.574958 | false | 3.431599 | false | false | false |
adrianpaesani/odoo-argentina | l10n_ar_invoice/models/account_chart_wizard.py | 2 | 5026 | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api
import logging
_logger = logging.getLogger(__name__)
class wizard_multi_charts_accounts(models.TransientModel):
_inherit = 'wizard.multi.charts.accounts'
# @api.model
# def generate_journals(
# self, chart_template_id, acc_template_ref, company_id):
# """
# Overwrite this function so that no journal is created on chart
# installation
# """
# return True
@api.model
def _prepare_all_journals(
self, chart_template_id, acc_template_ref, company_id):
"""
        Inherit this function so that we don't create sale and purchase journals.
"""
journal_data = super(
wizard_multi_charts_accounts, self)._prepare_all_journals(
chart_template_id, acc_template_ref, company_id)
# remove sale and purchase journals data
new_journal_data = [
journal for journal in journal_data if journal['type'] not in [
'sale', 'purchase', 'sale_refund', 'purchase_refund']]
return new_journal_data
@api.model
def _create_bank_journals_from_o2m(
self, obj_wizard, company_id, acc_template_ref):
"""
Overwrite this function so that no journal is created on chart
installation
"""
return True
# @api.model
# def _prepare_all_journals(
# self, chart_template_id, acc_template_ref, company_id):
# """
# Inherit this function in order to add use document and other
# configuration if company use argentinian localization
# """
# journal_data = super(
# wizard_multi_charts_accounts, self)._prepare_all_journals(
# chart_template_id, acc_template_ref, company_id)
# # if argentinian chart, we set use_argentinian_localization for company
# company = self.env['res.company'].browse(company_id)
# if company.use_argentinian_localization:
# point_of_sale = self.env['afip.point_of_sale'].search([
# ('number', '=', 1),
# ('company_id', '=', company_id),
# ], limit=1)
# if not point_of_sale:
# point_of_sale = point_of_sale.create({
# 'name': 'Punto de venta 1',
# 'number': 1,
# 'company_id': company_id,
# })
# for vals_journal in journal_data:
# if vals_journal['type'] in [
# 'sale', 'sale_refund', 'purchase', 'purchase_refund']:
# vals_journal['use_documents'] = True
# vals_journal['point_of_sale_id'] = point_of_sale.id
# return journal_data
@api.model
def configure_chart(
self, company_id, currency_id,
chart_template_id, sale_tax_id, purchase_tax_id):
# return True
if self.env['account.account'].search(
[('company_id', '=', company_id)]):
_logger.warning(
'There is already a chart of account for company_id %i' % (
company_id))
return True
_logger.info(
'Configuring chart %i for company %i' % (
chart_template_id, company_id))
wizard = self.with_context(company_id=company_id).create({
'company_id': company_id,
'currency_id': currency_id,
'only_one_chart_template': True,
'chart_template_id': chart_template_id,
'code_digits': 7,
"sale_tax": sale_tax_id,
"purchase_tax": purchase_tax_id,
# 'sale_tax_rate': ,
# 'purchase_tax_rate': ,
# 'complete_tax_set': fie
})
wizard.execute()
# add default tax to current products
_logger.info('Updating products taxes')
tax_vals = {}
sale_tax_template = self.env['account.tax.template'].browse(
sale_tax_id)
sale_tax = self.env['account.tax'].search([
('company_id', '=', company_id),
('name', '=', sale_tax_template.name)], limit=1)
if sale_tax:
tax_vals['taxes_id'] = [(4, sale_tax.id)]
purchase_tax_template = self.env['account.tax.template'].browse(
purchase_tax_id)
purchase_tax = self.env['account.tax'].search([
('company_id', '=', company_id),
('name', '=', purchase_tax_template.name)], limit=1)
if purchase_tax:
tax_vals['supplier_taxes_id'] = [(4, purchase_tax.id)]
for product in self.env['product.product'].search([]):
product.write(tax_vals)
return True
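    # Illustrative call (not part of the original module); all ids below are
    # hypothetical database ids of the company, currency, chart template and taxes:
    #
    #   self.env['wizard.multi.charts.accounts'].configure_chart(
    #       company_id=2, currency_id=19, chart_template_id=1,
    #       sale_tax_id=5, purchase_tax_id=8)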
| agpl-3.0 | 2,503,026,838,618,074,000 | 37.96124 | 81 | 0.524672 | false | 4.004781 | false | false | false |
sebastic/QGIS | python/plugins/processing/algs/otb/maintenance/OTBHelper.py | 4 | 29175 | # -*- coding: utf-8 -*-
"""
***************************************************************************
OTBHelper.py
---------------------
Copyright : (C) 2013 by CS Systemes d'information (CS SI)
Email : otb at c-s dot fr (CS SI)
Contributors : Julien Malik (CS SI) - File creation
Oscar Picas (CS SI) -
Alexia Mondot (CS SI) - Add particular case in xml creation
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Julien Malik, Oscar Picas, Alexia Mondot'
__copyright__ = '(C) 2013, CS Systemes d\'information (CS SI)'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
__version__ = "3.8"
import os
import copy
import xml.etree.ElementTree as ET
import traceback
from contextlib import contextmanager
import shutil
@contextmanager
def tag(name, c):
c.append("<%s>" % name)
yield
if ' ' in name:
c.append("</%s>" % name.split(' ')[0])
else:
c.append("</%s>" % name)
@contextmanager
def opentag(name, c):
c.append("<%s>" % name)
yield
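# Illustrative note (not from the original source): `tag` wraps whatever gets appended
# inside matching HTML tags, while `opentag` only emits the opening tag. For example:
#   result = []
#   with tag('h2', result):
#       result.append('Parameters')
#   # result == ['<h2>', 'Parameters', '</h2>']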
def get_group(appInstance):
tags = appInstance.GetDocTags()
sectionTags = ["Image Manipulation", "Vector Data Manipulation", "Calibration", "Geometry", "Image Filtering", "Feature Extraction", "Stereo", "Learning", "Segmentation"]
for sectionTag in sectionTags:
for tag in tags:
if tag == sectionTag:
return sectionTag
return "Miscellaneous"
def set_OTB_log():
import logging
logger = logging.getLogger('OTBGenerator')
hdlr = logging.FileHandler('OTBGenerator.log')
hdlr.setLevel(logging.DEBUG)
cons = logging.StreamHandler()
cons.setLevel(logging.CRITICAL)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.addHandler(cons)
logger.setLevel(logging.DEBUG)
def get_OTB_log():
import logging
logger = logging.getLogger('OTBGenerator')
if not logger.handlers:
set_OTB_log()
logger = logging.getLogger('OTBGenerator')
return logger
def indent(elem, level=0):
i = "\n" + level * " "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent(elem, level + 1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
set_OTB_log()
def get_parameters():
parameters = {getattr(otbApplication, each): each for each in dir(otbApplication) if 'ParameterType_' in each}
return parameters
def get_inverted_parameters():
"""
    Builds the mapping from OTB parameter type names (ParameterType_*) to the corresponding Processing parameter class names.
"""
parameters = {getattr(otbApplication, each): each for each in dir(otbApplication) if 'ParameterType_' in each}
inverted_parameters = {key: value for value, key in parameters.items()}
inverted_parameters['ParameterType_Radius'] = 1
inverted_parameters['ParameterType_RAM'] = 1
inverted_parameters['ParameterType_ComplexInputImage'] = 9
inverted_parameters['ParameterType_ComplexOutputImage'] = 13
inverted_parameters_clone = copy.deepcopy(inverted_parameters)
inverted_parameters_clone['ParameterType_Empty'] = 'ParameterBoolean'
inverted_parameters_clone['ParameterType_Int'] = 'ParameterNumber'
inverted_parameters_clone['ParameterType_Float'] = 'ParameterNumber'
inverted_parameters_clone['ParameterType_String'] = 'ParameterString'
inverted_parameters_clone['ParameterType_StringList'] = 'ParameterString'
inverted_parameters_clone['ParameterType_InputFilename'] = 'ParameterFile'
inverted_parameters_clone['ParameterType_OutputFilename'] = 'OutputFile'
inverted_parameters_clone['ParameterType_Directory'] = 'ParameterFile'
inverted_parameters_clone['ParameterType_Choice'] = 'ParameterSelection'
inverted_parameters_clone['ParameterType_InputImage'] = 'ParameterRaster'
inverted_parameters_clone['ParameterType_InputImageList'] = 'ParameterMultipleInput'
inverted_parameters_clone['ParameterType_InputVectorData'] = 'ParameterVector'
inverted_parameters_clone['ParameterType_InputVectorDataList'] = 'ParameterMultipleInput'
inverted_parameters_clone['ParameterType_OutputImage'] = 'OutputRaster'
inverted_parameters_clone['ParameterType_OutputVectorData'] = 'OutputVector'
inverted_parameters_clone['ParameterType_Radius'] = 'ParameterNumber'
inverted_parameters_clone['ParameterType_Group'] = None
inverted_parameters_clone['ParameterType_ListView'] = 'ParameterSelection'
inverted_parameters_clone['ParameterType_ComplexInputImage'] = 'ParameterRaster'
inverted_parameters_clone['ParameterType_ComplexOutputImage'] = 'OutputRaster'
inverted_parameters_clone['ParameterType_RAM'] = 'ParameterNumber'
inverted_parameters_clone['ParameterType_InputProcessXML'] = 'ParameterFile'
inverted_parameters_clone['ParameterType_OutputProcessXML'] = 'ParameterFile'
inverted_parameters_clone['ParameterType_InputFilenameList'] = 'ParameterMultipleInput' # 'ParameterString'
return inverted_parameters_clone
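# A few sample lookups (derived from the mapping built above, not present in the
# original file):
#   inv = get_inverted_parameters()
#   inv['ParameterType_InputImage']    # -> 'ParameterRaster'
#   inv['ParameterType_OutputImage']   # -> 'OutputRaster'
#   inv['ParameterType_Choice']        # -> 'ParameterSelection'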
def retrieve_module_name(param):
"""
    Returns the path of the Processing module file (parameters.py or outputs.py) that defines the given parameter class.
"""
if param:
try:
import processing.core
dir_p = os.path.dirname(processing.core.__file__)
if 'Parameter' in param:
exec("from processing.core.parameters import %s" % param)
return os.path.join(dir_p, "parameters.py")
if 'Output' in param:
exec("from processing.core.outputs import %s" % param)
return os.path.join(dir_p, "outputs.py")
except ImportError as e:
print "Error parsing ", param
return None
def get_constructor_parameters_from_filename(py_file, param=""):
"""
Get all parameters from the constructor of the class param in the given py_file
"""
import ast
asto = ast.parse(open(py_file).read())
# get all class definitions corresponding to param given len(e1) should be 1
e1 = [each for each in asto.body if isinstance(each, ast.ClassDef) and each.name == param]
# e1[0].body lists all functions from the class e1[0]
# e2 is a list of __init__ functions of class e1[0]
e2 = [each for each in e1[0].body if hasattr(each, "name") and each.name == "__init__"]
if len(e2) > 0:
e4 = e2[0].args.args
else:
e4 = []
e5 = [each.id for each in e4]
return e5
def get_customize_app_functions():
"""
    Get the names of all customization functions (those starting with "get") defined in OTBSpecific_XMLcreation.py.
"""
import ast
py_file = os.path.join(os.path.dirname(__file__), "OTBSpecific_XMLcreation.py")
asto = ast.parse(open(py_file).read())
# get all class definitions corresponding to param given len(e1) should be 1
e1 = [each.name for each in asto.body if isinstance(each, ast.FunctionDef) and each.name.startswith("get")]
return e1
def get_xml_description_from_application_name(our_app, criteria=None):
"""
    Creates an XML element tree describing the given application our_app (key, executable, group, description and parameters).
"""
# creates the application to get the description
# header
app_instance = otbApplication.Registry.CreateApplication(our_app)
root = ET.Element('root')
app = ET.SubElement(root, 'key')
app.text = our_app
executable = ET.SubElement(root, 'exec')
executable.text = "otbcli_" + our_app
longname = ET.SubElement(root, 'longname')
longname.text = app_instance.GetDocName()
group = ET.SubElement(root, 'group')
group.text = get_group(app_instance)
desc = ET.SubElement(root, 'description')
desc.text = app_instance.GetDescription()
if not criteria:
real_criteria = lambda x: True
else:
if not callable(criteria):
raise Exception("criteria parameter must be a valid python callable")
real_criteria = criteria
if len(our_app) == 0:
raise Exception("App name is empty !")
# get parameters
param_keys = [param_key for param_key in app_instance.GetParametersKeys()]
param_keys = filter(real_criteria, param_keys)
for param_key in param_keys:
if not param_key == "inxml" and not param_key == "outxml":
get_param_descriptor(app.text, app_instance, param_key, root)
indent(root)
return root
def get_the_choices(app_instance, our_descriptor, root):
choices = ET.SubElement(root, 'choices')
for choice in app_instance.GetChoiceKeys(our_descriptor):
choice_node = ET.SubElement(choices, 'choice')
choice_node.text = choice
def get_param_descriptor(appkey, app_instance, our_descriptor, root):
"""
update the root xml with the data of the parameter given by "our_descriptor"
"""
logger = get_OTB_log()
parameters = get_parameters()
our_type = parameters[app_instance.GetParameterType(our_descriptor)]
#get the list of mapped parameters (otb/processing)
inverted_parameters = get_inverted_parameters()
mapped_parameter = inverted_parameters[our_type]
file_parameter = retrieve_module_name(mapped_parameter)
if not file_parameter:
logger.info("Type %s is not handled yet. (%s, %s)" % (our_type, appkey, our_descriptor))
return
the_params = get_constructor_parameters_from_filename(file_parameter, mapped_parameter)
# special for default values of OpticalCalibration
if appkey == "OpticalCalibration":
if "default" in the_params:
try:
app_instance.GetParameterAsString(our_descriptor)
except RuntimeError as e:
return
param = ET.SubElement(root, 'parameter')
attrs = {'source_parameter_type': parameters[app_instance.GetParameterType(our_descriptor)]}
if appkey == "Segmentation":
if parameters[app_instance.GetParameterType(our_descriptor)] == "ParameterType_OutputFilename":
attrs = {'source_parameter_type': 'ParameterType_OutputVectorData'}
if appkey == "LSMSVectorization":
if parameters[app_instance.GetParameterType(our_descriptor)] == "ParameterType_OutputFilename":
attrs = {'source_parameter_type': 'ParameterType_OutputVectorData'}
if appkey == "SplitImage":
if parameters[app_instance.GetParameterType(our_descriptor)] == "ParameterType_OutputImage":
attrs = {'source_parameter_type': 'ParameterType_OutputFilename'}
if parameters[app_instance.GetParameterType(our_descriptor)] == "ParameterType_ListView":
if not appkey == "RadiometricIndices":
attrs = {'source_parameter_type': 'ParameterType_StringList'}
param_type = ET.SubElement(param, 'parameter_type', attrib=attrs)
param_type.text = inverted_parameters[parameters[app_instance.GetParameterType(our_descriptor)]]
if appkey == "Segmentation":
if parameters[app_instance.GetParameterType(our_descriptor)] == "ParameterType_OutputFilename":
param_type.text = "OutputVector"
if appkey == "LSMSVectorization":
if parameters[app_instance.GetParameterType(our_descriptor)] == "ParameterType_OutputFilename":
param_type.text = "OutputVector"
if appkey == "SplitImage":
if parameters[app_instance.GetParameterType(our_descriptor)] == "ParameterType_OutputImage":
param_type.text = "OutputFile"
if parameters[app_instance.GetParameterType(our_descriptor)] == "ParameterType_ListView":
if not appkey == "RadiometricIndices":
param_type.text = "ParameterString"
    # the_params = get_constructor_parameters_from_filename(file_parameter, mapped_parameter)
if len(the_params) == 0:
# if 'Output' in file_parameter:
if 'output' in file_parameter:
file_path = os.path.join(os.path.dirname(file_parameter), 'outputs.py')
the_params = get_constructor_parameters_from_filename(file_path, "Output")
if 'parameter' in file_parameter:
file_path = os.path.join(os.path.dirname(file_parameter), 'parameters.py')
the_params = (file_path)
the_params = get_constructor_parameters_from_filename(file_path, "Parameter")
if "self" in the_params:
#remove self
the_params.remove("self") # the_params[1:]
# to be identical as before !
if "isSource" in the_params:
the_params.remove("isSource")
if "showSublayersDialog" in the_params:
the_params.remove("showSublayersDialog")
if "ext" in the_params:
the_params.remove("ext")
else:
raise Exception("Unexpected constructor parameters")
key = ET.SubElement(param, 'key')
key.text = our_descriptor
is_choice_type = False
for each in the_params:
if each == "name":
name = ET.SubElement(param, 'name')
nametext = app_instance.GetParameterName(our_descriptor)
if "angle" in nametext:
name.text = nametext.replace("\xc2\xb0", "deg")
else:
name.text = app_instance.GetParameterName(our_descriptor)
if our_descriptor == "acqui.fluxnormcoeff":
pass
elif each == "description":
desc = ET.SubElement(param, 'description')
desc.text = app_instance.GetParameterDescription(our_descriptor)
elif each == "optional":
optional = ET.SubElement(param, 'optional')
optional.text = str(not app_instance.IsMandatory(our_descriptor))
elif each == "default":
done = False
reason = []
try:
default_value = str(app_instance.GetParameterAsString(our_descriptor))
done = True
except:
reason.append(traceback.format_exc())
if not done:
try:
default_value = str(app_instance.GetParameterFloat(our_descriptor))
done = True
except:
reason.append(traceback.format_exc())
if not done:
try:
default_value = str(app_instance.GetParameterInt(our_descriptor))
done = True
except:
reason.append(traceback.format_exc())
if done:
default = ET.SubElement(param, 'default')
default.text = default_value
if is_choice_type:
the_keys = [a_key for a_key in app_instance.GetChoiceKeys(our_descriptor)]
if default_value in the_keys:
default.text = str(the_keys.index(default_value))
else:
default.text = ''
else:
logger.debug("A parameter transformation failed, trying default values : for %s, %s, type %s!, conversion message: %s" % (appkey, our_descriptor, parameters[app_instance.GetParameterType(our_descriptor)], str(reason)))
the_type = parameters[app_instance.GetParameterType(our_descriptor)]
if the_type == "ParameterType_Int":
default_value = "0"
elif the_type == "ParameterType_Float":
default_value = "0.0"
elif the_type == "ParameterType_Empty":
default_value = "True"
else:
raise Exception("Unable to adapt %s, %s, %s, conversion message: %s" % (appkey, our_descriptor, parameters[app_instance.GetParameterType(our_descriptor)], str(reason)))
default = ET.SubElement(param, 'default')
default.text = default_value
else:
is_choice_type = 'Selection' in param_type.text
node = ET.SubElement(param, each)
if is_choice_type:
get_the_choices(app_instance, our_descriptor, node)
def get_default_parameter_value(app_instance, param):
parameters = get_parameters()
try:
return app_instance.GetParameterAsString(param)
except:
the_type = parameters[app_instance.GetParameterType(param)]
default_value = "0"
if the_type == "ParameterType_Int":
default_value = "0"
elif the_type == "ParameterType_Float":
default_value = "0.0"
elif the_type == "ParameterType_Empty":
default_value = "True"
return default_value
def escape_html(par):
if 'Int' in par:
return '<int32>'
if 'Float' in par:
return '<float>'
if 'Empty' in par:
return '<boolean>'
if 'Radius' in par:
return '<int32>'
if 'RAM' in par:
return '<int32>'
return '<string>'
def is_a_parameter(app_instance, param):
if app_instance.GetName() == "HaralickTextureExtraction":
if param.startswith("parameters."):
return True
if '.' in param:
return False
try:
app_instance.GetChoiceKeys(param)
return False
except:
return True
def describe_app(app_instance):
parameters = get_parameters()
result = []
with tag('html', result):
with tag('head', result):
how = """
<style type="text/css">
dl { border: 3px double #ccc; padding: 0.5em; } dt { float: left; clear: left; text-align: left; font-weight: bold; color: green; } dt:after { content: ":"; } dd { margin: 0 0 0 220px; padding: 0 0 0.5em 0; }
</style>
"""
result.append(how)
with tag('body', result):
with tag('h1', result):
result.append(app_instance.GetName())
with tag('h2', result):
result.append('Brief Description')
result.append(app_instance.GetDescription())
with tag('h2', result):
result.append('Tags')
result.append(','.join(app_instance.GetDocTags()))
with tag('h2', result):
result.append('Long Description')
result.append(app_instance.GetDocLongDescription())
with tag('h2', result):
result.append('Parameters')
params = app_instance.GetParametersKeys()
with tag('ul', result):
for param in params:
if is_a_parameter(app_instance, param):
with tag('li', result):
result.append('<b>%s -%s</b> %s ' % ('[param]', param, escape_html(parameters[app_instance.GetParameterType(param)])))
result.append('%s. Mandatory: %s. Default Value: "%s"' % (app_instance.GetParameterDescription(param), str(app_instance.IsMandatory(param)), get_default_parameter_value(app_instance, param)))
choices_tags = [each for each in params if (not is_a_parameter(app_instance, each)) and '.' not in each]
for choice in choices_tags:
result.append('<b>%s -%s</b> %s %s. Mandatory: %s. Default Value: "%s"' % ('[choice]', choice, app_instance.GetParameterDescription(choice), ','.join(app_instance.GetChoiceKeys(choice)), str(app_instance.IsMandatory(choice)), get_default_parameter_value(app_instance, choice)))
choices = app_instance.GetChoiceKeys(choice)
with tag('ul', result):
for subchoice in choices:
with tag('li', result):
result.append('<b>%s -%s</b>' % ('[group]', subchoice))
with tag('ul', result):
param_tags = [each for each in params if '.%s' % subchoice in each]
for param_tag in param_tags:
with tag('li', result):
result.append('<b>%s -%s</b> ' % ('[param]', param_tag))
                                            result.append('%s %s. Mandatory: %s. Default Value: "%s"' % (escape_html(parameters[app_instance.GetParameterType(param_tag)]), app_instance.GetParameterDescription(param_tag), str(app_instance.IsMandatory(param_tag)), get_default_parameter_value(app_instance, param_tag)))
with tag('h2', result):
result.append('Limitations')
result.append(app_instance.GetDocLimitations())
with tag('h2', result):
result.append('Authors')
result.append(app_instance.GetDocAuthors())
with tag('h2', result):
result.append('See Also')
result.append(app_instance.GetDocSeeAlso())
with tag('h2', result):
result.append('Example of use')
result.append(app_instance.GetHtmlExample())
if app_instance.GetName() == "HaralickTextureExtraction":
index = result.index("<b>[param] -parameters</b> <string> ")
del result[index + 2]
del result[index + 1]
del result[index]
del result[index - 1]
return "".join(result)
def get_list_from_node(myet, available_app):
all_params = []
for parameter in myet.iter('parameter'):
rebuild = []
par_type = parameter.find('parameter_type').text
key = parameter.find('key').text
name = parameter.find('name').text
source_par_type = parameter.find('parameter_type').attrib['source_parameter_type']
rebuild.append(source_par_type)
rebuild.append(par_type)
rebuild.append(key)
rebuild.append(name)
for each in parameter[4:]:
if each.tag not in ["hidden"]:
if len(each.getchildren()) == 0:
if each.tag in ["default"]:
if "-" in available_app:
available_app = available_app.split("-")[0]
app_instance = otbApplication.Registry.CreateApplication(available_app)
rebuild.append(get_default_parameter_value(app_instance, key))
else:
rebuild.append(each.text)
else:
rebuild.append([item.text for item in each.iter('choice')])
all_params.append(rebuild)
return all_params
def adapt_list_to_string(c_list):
a_list = c_list[1:]
if a_list[0] in ["ParameterVector", "ParameterMultipleInput"]:
if c_list[0] == "ParameterType_InputImageList":
a_list[3] = 3
else:
a_list[3] = -1
if a_list[0] in ["ParameterRaster", "ParameterFile", "ParameterMultipleInput", "OutputRaster", "OutputFile"]:
if "Output" in a_list[0]:
a_list.append("/tmp/processing/output.tif")
else:
import os
a_list.append(os.path.join(os.path.abspath(os.curdir), "helper/QB_Toulouse_Ortho_PAN.tif"))
if a_list[0] in ["ParameterSelection"]:
pass
a_list[1] = "-%s" % a_list[1]
def mystr(par):
if isinstance(par, list):
return ";".join(par)
return str(par)
if a_list[-1] is None:
return ""
b_list = map(mystr, a_list)
b_list = [b_list[1], b_list[-1]]
res = " ".join(b_list)
return res
def get_automatic_ut_from_xml_description(the_root):
dom_model = the_root
try:
appkey = dom_model.find('key').text
cliName = dom_model.find('exec').text
if not cliName.startswith("otbcli_"):
raise Exception('Wrong client executable')
rebu = get_list_from_node(dom_model, appkey)
the_result = map(adapt_list_to_string, rebu)
ut_command = cliName + " " + " ".join(the_result)
return ut_command
except Exception as e:
ET.dump(dom_model)
raise
def list_reader(file_name, version):
tree = ET.parse(file_name)
root = tree.getroot()
nodes = [each.text for each in root.findall("./version[@id='%s']/app_name" % version)]
return nodes
def get_otb_version():
#TODO Find a way to retrieve installed otb version, force exception and parse otb-X.XX.X ?
# return "3.18"
return "5.0"
def get_white_list():
nodes = list_reader("white_list.xml", get_otb_version())
return nodes
def get_black_list():
nodes = list_reader("black_list.xml", get_otb_version())
return nodes
def create_xml_descriptors():
import os
if not os.path.exists("description"):
os.mkdir("description")
if not os.path.exists("html"):
os.mkdir("html")
logger = get_OTB_log()
white_list = get_white_list()
black_list = get_black_list()
custom_apps_available = get_customize_app_functions()
for available_app in otbApplication.Registry.GetAvailableApplications():
# try:
if 'get%s' % available_app in custom_apps_available:
if available_app in white_list and available_app not in black_list:
the_list = []
the_root = get_xml_description_from_application_name(available_app)
function_to_call = "the_list = OTBSpecific_XMLcreation.get%s(available_app,the_root)" % available_app
exec(function_to_call)
# the_list = locals()['get%s' % available_app](available_app, the_root)
if the_list:
for each_dom in the_list:
try:
ut_command = get_automatic_ut_from_xml_description(each_dom)
except:
logger.error("Unit test for command %s must be fixed: %s" % (available_app, traceback.format_exc()))
else:
logger.warning("%s is not in white list." % available_app)
else:
if available_app in white_list and available_app not in black_list:
logger.warning("There is no adaptor for %s, check white list and versions" % available_app)
# TODO Remove this default code when all apps are tested...
fh = open("description/%s.xml" % available_app, "w")
the_root = get_xml_description_from_application_name(available_app)
ET.ElementTree(the_root).write(fh)
fh.close()
try:
ut_command = get_automatic_ut_from_xml_description(the_root)
except:
logger.error("Unit test for command %s must be fixed: %s" % (available_app, traceback.format_exc()))
# except Exception, e:
# logger.error(traceback.format_exc())
def create_html_description():
logger = get_OTB_log()
if not os.path.exists("description/doc"):
os.mkdir("description/doc")
for available_app in otbApplication.Registry.GetAvailableApplications():
try:
fh = open("description/doc/%s.html" % available_app, "w")
app_instance = otbApplication.Registry.CreateApplication(available_app)
app_instance.UpdateParameters()
ct = describe_app(app_instance)
fh.write(ct)
fh.close()
except Exception as e:
logger.error(traceback.format_exc())
sub_algo = [each for each in os.listdir("description") if "-" in each and ".xml" in each]
for key in sub_algo:
shutil.copy("description/doc/%s" % key.split("-")[0] + ".html", "description/doc/%s" % key.split(".")[0] + ".html")
if __name__ == "__main__":
# Prepare the environment
import sys
import os
from qgis.core import QgsApplication
from PyQt4.QtGui import QApplication
app = QApplication([])
QgsApplication.setPrefixPath("/usr", True)
QgsApplication.initQgis()
# Prepare processing framework
from processing.core.Processing import Processing
Processing.initialize()
import OTBSpecific_XMLcreation
# try:
# import processing
# except ImportError, e:
# raise Exception("Processing must be installed and available in PYTHONPATH")
try:
import otbApplication
except ImportError as e:
raise Exception("OTB python plugins must be installed and available in PYTHONPATH")
create_xml_descriptors()
create_html_description()
# Exit applications
QgsApplication.exitQgis()
QApplication.exit()
| gpl-2.0 | 5,006,955,171,365,531,000 | 39.464632 | 323 | 0.602639 | false | 4.028027 | false | false | false |
WaveBlocks/WaveBlocksND | WaveBlocksND/LimitedHyperbolicCutShape.py | 1 | 10861 | """The WaveBlocks Project
This file contains the class for representing the hyperbolic cut
basis shape which is a special type of sparse basis set.
@author: R. Bourquin
@copyright: Copyright (C) 2012, 2013, 2014 R. Bourquin
@license: Modified BSD License
"""
from numpy import eye, vstack, integer
from WaveBlocksND.BasisShape import BasisShape
from WaveBlocksND.HyperbolicCutShape import HyperbolicCutShape
from functools import reduce
__all__ = ["LimitedHyperbolicCutShape"]
class LimitedHyperbolicCutShape(BasisShape):
r"""This class implements the hyperbolic cut basis shape which
is a special type of sparse basis set. A basis shape is essentially
all information and operations related to the set :math:`\mathfrak{K}`
of multi-indices :math:`k`. The hyperbolic cut shape in :math:`D` dimensions
with `sparsity` :math:`S` and limits :math:`K = (K_0,\ldots,K_{D-1})`
is defined as the set
.. math::
\mathfrak{K}(D, S, K) := \{ (k_0, \ldots, k_{D-1}) |
0 \leq k_d < K_d \forall d \in [0,\ldots,D-1]
\land \prod_{d=0}^{D-1}(1+k_d) \leq S \}
"""
def __init__(self, D, K, limits):
r"""
:param D: The dimension :math:`D`
:param K: The sparsity parameter :math:`S`
:param limits: The list of all limits :math:`\{K_d\}_{d=0}^{D-1}`
"""
# The dimension of K
self._dimension = D
# The sparsity parameter
self._sparsity = K
# The limits
limits = tuple(limits)
if all([int(l) > 0 for l in limits]):
self._limits = limits
else:
raise ValueError("All limits have to be positive.")
# The linear mapping k -> index for the basis
iil = self._get_index_iterator_lex()
self._lima = {k: index for index, k in enumerate(iil)}
# And the inverse mapping
self._lima_inv = {v: k for k, v in self._lima.items()}
# The basis size
self._basissize = len(self._lima)
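    # Worked example (added for illustration, not part of the original source): for
    # D = 2, sparsity S = 4 and limits K = (3, 3), the definition in the class
    # docstring admits exactly the multi-indices with (1+k_0)*(1+k_1) <= 4 and
    # k_d < 3, i.e. (0,0), (1,0), (2,0), (0,1), (1,1), (0,2), so the basis size is 6.
    #
    #   >>> K = LimitedHyperbolicCutShape(2, 4, (3, 3))
    #   >>> len([k for k in K.get_node_iterator()])
    #   6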
def __str__(self):
r""":return: A string describing the basis shape :math:`\mathfrak{K}`.
"""
s = ("Hyperbolic cut basis shape of dimension "+str(self._dimension)+" and sparsity "+str(self._sparsity)+" limited at "+str(self._limits)+".")
return s
def __hash__(self):
r"""Compute a unique hash for the basis shape. In the case of hyperbolic
cut basis shapes :math:`\mathfrak{K}` the basis is fully specified by its
        dimension :math:`D`, the sparsity parameter :math:`K` and the limits :math:`K_d`.
"""
return hash(("LimitedHyperbolicCutShape", self._dimension, self._sparsity, self._limits))
def __getitem__(self, k):
r"""Make map look ups.
"""
if type(k) is tuple or type(k) is list:
k = tuple(k)
assert len(k) == self._dimension
if k in self._lima:
return self._lima[k]
elif type(k) is int:
if k in self._lima_inv:
return self._lima_inv[k]
else:
raise IndexError("Wrong index type")
def __contains__(self, k):
r"""
Checks if a given multi-index :math:`k` is part of the basis set :math:`\mathfrak{K}`.
:param k: The multi-index :math:`k` we want to test.
:type k: tuple
"""
assert len(tuple(k)) == self._dimension
return tuple(k) in self._lima
def __iter__(self):
r"""Implements iteration over the multi-indices :math:`k`
of the basis set :math:`\mathfrak{K}`.
Note: The order of iteration is NOT fixed. If you need a special
iteration scheme, use :py:meth:`get_node_iterator`.
"""
# TODO: Better remove this as it may cause unexpected behaviour?
return iter(self._lima)
def contains(self, k):
r"""
Checks if a given multi-index :math:`k` is part of the basis set :math:`\mathfrak{K}`.
:param k: The multi-index :math:`k` we want to test.
:type k: tuple
"""
return tuple(k) in self._lima
def get_description(self):
r"""Return a description of this basis shape object.
A description is a ``dict`` containing all key-value pairs
necessary to reconstruct the current basis shape. A description
never contains any data.
"""
d = {}
d["type"] = "LimitedHyperbolicCutShape"
d["dimension"] = self._dimension
d["K"] = self._sparsity
d["limits"] = self._limits
return d
def extend(self, tight=True):
r"""Extend the basis shape such that (at least) all neighbours of all
boundary nodes are included in the extended basis shape.
:param tight: Whether to cut off the long tails.
        :type tight: Boolean, default is ``True``
"""
D = self._dimension
K = self._sparsity
if D > 1:
# This formula is more narrow than: K = 2**(D-1) * (K+1)
# but works only for D >= 2
new_sparsity = 2**(D - 1) * K
else:
# Special casing K = 2**(D-1) * (K+1) for D = 1
new_sparsity = K + 1
if tight is True:
new_limits = tuple([l + 1 for l in self._limits])
return LimitedHyperbolicCutShape(D, new_sparsity, new_limits)
else:
return HyperbolicCutShape(D, new_sparsity)
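    # Quick numeric check (illustrative, not in the original source): for D = 3 and
    # K = 5 the extended sparsity is 2**(3-1) * 5 = 20, and with ``tight=True`` every
    # limit K_d grows by one:
    #
    #   >>> K = LimitedHyperbolicCutShape(3, 5, (4, 4, 4))
    #   >>> K.extend().get_limits()
    #   (5, 5, 5)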
def _get_index_iterator_lex(self):
r"""
"""
# The hyperbolic cut parameter
sparsity = self._sparsity
# Upper bounds in each dimension
bounds = self._limits[::-1]
def index_iterator_lex(S, bounds):
# Initialize a counter
z = [0 for i in range(self._dimension + 1)]
while z[self._dimension] == 0:
# Yield the current index vector
yield tuple(reversed(z[:-1]))
# Increment fastest varying bit
z[0] += 1
# Reset overflows
for d in range(self._dimension):
K = reduce(lambda x, y: x * (y + 1), z[:-1], 1)
if z[d] >= bounds[d] or K > S:
z[d] = 0
z[d + 1] += 1
return index_iterator_lex(sparsity, bounds)
def _get_index_iterator_chain(self, direction=0):
r"""
"""
# The hyperbolic cut parameter
sparsity = self._sparsity
# Upper bounds in each dimension
bounds = self._limits[::-1]
def index_iterator_chain(S, bounds, d):
D = self._dimension
# The counter
z = [0 for i in range(D + 1)]
# Iterate over all valid stencil points
while z[D] == 0:
yield tuple(reversed(z[:-1]))
# Increase index in the dimension we build the chain
z[D - d - 1] += 1
# Check if we are done with the current base point
# If yes, move base point and start a new chain
# Reset overflows
for i in range(D - d - 1, D):
K = reduce(lambda x, y: x * (y + 1), z[(D - d - 1):-1], 1)
if z[i] > bounds[i] - 1 or K > S:
z[i] = 0
z[i + 1] += 1
return index_iterator_chain(sparsity, bounds, direction)
def _get_index_iterator_mag(self):
r"""
"""
# Nodes sorted by l_1 magnitude
nodes = sorted(self._lima.keys(), key=sum)
def index_iterator_mag(nodes):
for node in nodes:
yield node
return index_iterator_mag(nodes)
def get_node_iterator(self, mode="lex", direction=None):
r"""
Returns an iterator to iterate over all basis elements :math:`k \in \mathfrak{K}`.
:param mode: The mode by which we iterate over the indices. Default is ``lex``
                     for lexicographical order. Also supported are ``chain`` for the
                     chain-like mode (see the manual for details) and ``mag`` for
                     iteration ordered by increasing :math:`\ell^1` norm of the indices.
:type mode: string
:param direction: If iterating in `chainmode` this specifies the direction
the chains go.
:type direction: integer.
"""
if mode == "lex":
return self._get_index_iterator_lex()
elif mode == "chain":
if direction < self._dimension:
return self._get_index_iterator_chain(direction=direction)
else:
raise ValueError("Can not build iterator for this direction.")
elif mode == "mag":
return self._get_index_iterator_mag()
# TODO: Consider boundary node only iterator
else:
raise ValueError("Unknown iterator mode: {}.".format(mode))
def get_limits(self):
r"""Returns the upper limit :math:`K_d` for all directions :math:`d`.
:return: A tuple of the maximum of the multi-index in each direction.
"""
return tuple(self._limits)
def get_neighbours(self, k, selection=None, direction=None):
r"""
Returns a list of all multi-indices that are neighbours of a given
multi-index :math:`k`. A direct neighbour is defined as
:math:`(k_0, \ldots, k_d \pm 1, \ldots, k_{D-1}) \forall d \in [0 \ldots D-1]`.
:param k: The multi-index of which we want to get the neighbours.
:type k: tuple
:param selection:
:type selection: string with fixed values ``forward``, ``backward`` or ``all``.
                         The value ``all`` is equivalent to the value ``None`` (default).
:param direction: The direction :math:`0 \leq d < D` in which we want to find
the neighbours :math:`k \pm e_d`.
:type direction: int
:return: A list containing the pairs :math:`(d, k^\prime)`.
"""
assert len(tuple(k)) == self._dimension
# First build a list of potential neighbours
I = eye(self._dimension, dtype=integer)
ki = vstack(k)
# Forward and backward direct neighbours
nbfw = ki + I
nbbw = ki - I
# Keep only the valid ones
nbh = []
if direction is not None:
directions = [direction]
else:
directions = range(self._dimension)
for d in directions:
nfw = tuple(nbfw[:, d])
nbw = tuple(nbbw[:, d])
# TODO: Try to simplify these nested if blocks
if selection in ("backward", "all", None):
if nbw in self:
nbh.append((d, nbw))
if selection in ("forward", "all", None):
if nfw in self:
nbh.append((d, nfw))
return nbh
| bsd-3-clause | 337,433,239,254,320,450 | 33.154088 | 151 | 0.54286 | false | 3.940856 | false | false | false |
davidbgk/udata | udata/mail.py | 2 | 2032 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from contextlib import contextmanager
from blinker import signal
from flask import current_app
from flask_mail import Mail, Message
from udata import theme, i18n
log = logging.getLogger(__name__)
mail = Mail()
mail_sent = signal('mail-sent')
class FakeMailer(object):
'''Display sent mail in logging output'''
def send(self, msg):
log.debug(msg.body)
log.debug(msg.html)
mail_sent.send(msg)
@contextmanager
def dummyconnection(*args, **kw):
    """Allow testing email template rendering without actually sending emails."""
yield FakeMailer()
def init_app(app):
mail.init_app(app)
def send(subject, recipients, template_base, **kwargs):
'''
Send a given email to multiple recipients.
    The user's preferred language is taken into account.
    To translate the subject into the right language, you should use ugettext_lazy.
'''
sender = kwargs.pop('sender', None)
if not isinstance(recipients, (list, tuple)):
recipients = [recipients]
debug = current_app.config.get('DEBUG', False)
send_mail = current_app.config.get('SEND_MAIL', not debug)
connection = send_mail and mail.connect or dummyconnection
with connection() as conn:
for recipient in recipients:
lang = i18n._default_lang(recipient)
with i18n.language(lang):
log.debug(
'Sending mail "%s" to recipient "%s"', subject, recipient)
msg = Message(subject, sender=sender,
recipients=[recipient.email])
msg.body = theme.render(
'mail/{0}.txt'.format(template_base), subject=subject,
sender=sender, recipient=recipient, **kwargs)
msg.html = theme.render(
'mail/{0}.html'.format(template_base), subject=subject,
sender=sender, recipient=recipient, **kwargs)
conn.send(msg)
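# Illustrative usage sketch (not part of the original module). The template base and
# the recipient object are hypothetical; recipients only need an ``email`` attribute
# plus whatever the templates themselves use:
#
#   send(_('Welcome'), [user], 'new_user', user=user)
#
# This renders mail/new_user.txt and mail/new_user.html in each recipient's language.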
| agpl-3.0 | 5,569,342,784,436,271,000 | 28.449275 | 79 | 0.621555 | false | 4.189691 | false | false | false |
vbrown608/csa-match | src/load_data.py | 1 | 1782 | import csv
import logging
import os
from models import *
import config
from google.appengine.ext import ndb
from google.appengine.api import search
def clearAllData(sample_data=True):
"""
Clear all CSAs and Sites from the datastore.
Clear all documents from the search index.
"""
logging.info('Clearing datastore')
csa_keys = CSA.query().fetch(keys_only=True)
result = ndb.delete_multi(csa_keys)
site_keys = Site.query().fetch(keys_only=True)
ndb.delete_multi(site_keys)
logging.info('Clearing search index')
doc_index = search.Index(name=config.SITE_INDEX_NAME)
try:
while True:
# until no more documents, get a list of documents,
# constraining the returned objects to contain only the doc ids,
# extract the doc ids, and delete the docs.
document_ids = [document.doc_id
for document in doc_index.get_range(ids_only=True)]
if not document_ids:
break
doc_index.delete(document_ids)
except search.Error:
logging.exception("Error removing documents:")
def loadFromCSV():
logging.info('Loading CSA data')
datafile = os.path.join('data', config.CSA_DATA)
reader = csv.DictReader(
open(datafile, 'rU'),
['id', 'name', 'description'])
for row in reader:
csa = CSA(id = row['id'], name = row['name'], description = row['description'], url = 'foo')
csa.put()
logging.info('Loading Site data')
datafile = os.path.join('data', config.SITE_DATA)
reader = csv.DictReader(
open(datafile, 'rU'),
['csa', 'name', 'address', 'lat', 'lng'])
for row in reader:
csa_key = ndb.Key(CSA, row['csa'])
site = Site(csa = csa_key, name=row['name'], address = row['address'], lat = float(row['lat']), lng = float(row['lng']))
site.put()
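# Expected CSV layouts (inferred from the DictReader field names above; the sample
# rows are purely illustrative):
#   CSA_DATA: id,name,description e.g. csa1,Green Farm,Weekly veggie boxes
#   SITE_DATA: csa,name,address,lat,lng e.g. csa1,Main St pickup,1 Main St,37.77,-122.42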
| gpl-2.0 | -3,003,912,828,793,456,600 | 28.7 | 124 | 0.654882 | false | 3.426923 | false | false | false |