code | repo_name | path | language | license | size |
---|---|---|---|---|---|
# -*- coding: utf-8 -*-
#
import qrcode
try:
#pylint: disable = E, W, R, C
    from PyQt6.QtGui import QImage, QPixmap, QPainter # @UnusedImport @Reimport @UnresolvedImport
from PyQt6.QtCore import Qt # @UnusedImport @Reimport @UnresolvedImport
except Exception:
#pylint: disable = E, W, R, C
    from PyQt5.QtGui import QImage, QPixmap, QPainter # @UnusedImport @Reimport @UnresolvedImport
from PyQt5.QtCore import Qt # @UnusedImport @Reimport @UnresolvedImport
##########################################################################
##################### QR Image #####################################
##########################################################################
class QRImage(qrcode.image.base.BaseImage):
def new_image(self, **_kwargs):
img = QImage(self.pixel_size, self.pixel_size, QImage.Format.Format_RGB16)
img.fill(Qt.GlobalColor.white)
return img
def pixmap(self):
return QPixmap.fromImage(self.get_image())
def drawrect(self, row, col):
painter = QPainter(self.get_image())
painter.fillRect(
(col + self.border) * self.box_size,
(row + self.border) * self.box_size,
self.box_size, self.box_size,
Qt.GlobalColor.black)
def save(self, stream, kind=None):
pass
def process(self):
pass
def drawrect_context(self, row, col, active, context):
pass
def QRlabel(url_str):
qr = qrcode.QRCode(
version=None, # 1,
error_correction=qrcode.constants.ERROR_CORRECT_L,
box_size=4,
border=1,
image_factory=QRImage)
qr.add_data(url_str)
qr.make(fit=True)
return qr | artisan-roaster-scope/artisan | src/artisanlib/qrcode.py | Python | gpl-3.0 | 1,715 |
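# A minimal usage sketch for the module above (assumes a Qt application is
# running so a QPixmap can be created; the URL is an arbitrary example value):
from artisanlib.qrcode import QRlabel

qr = QRlabel('https://artisan-scope.org')
pixmap = qr.make_image().pixmap()  # QPixmap ready to hand to a QLabel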
import cv2
import numpy as np
from matplotlib import pyplot as plt
from time import sleep
from camera import Camera
from front_camera import FrontCamera
from rear_camera import RearCamera
run_front = True
run_rear = True
white = (255, 255, 255)
black = (0, 0, 0)
if run_front:
front_camera = Camera(src=1)
front_cam_processing = FrontCamera(front_camera)
if run_rear:
back_camera = Camera(src=0)
rear_cam_processing = RearCamera(back_camera)
def drawPoints(image, point_array):
for point in point_array:
cv2.circle(image, point, 2, white, -1)
def main():
while(True):
sleep(.1)
# points = []
# points.append((20,20))
# points.append((100, 100))
# drawPoints(front, points)
# edges = cv2.Canny(back, 220, 210)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
cv2.destroyAllWindows()
if __name__ == "__main__":
main()
| jbpacker/lolcam | run.py | Python | mit | 938 |
import logging
try:
import torch
from transformers import pipeline
except ImportError:
# No installation required if not using this function
pass
from nlpaug.model.lang_models import LanguageModels
class FmTransformers(LanguageModels):
def __init__(self, model_path='bert-base-uncased', model_type='bert', top_k=None, device='cuda',
max_length=300, batch_size=32, silence=True):
super().__init__(device, model_type=model_type, top_k=top_k, silence=silence)
try:
from transformers import pipeline
except ModuleNotFoundError:
            raise ModuleNotFoundError('Missing transformers library. Install transformers with `pip install transformers`')
self.max_length = max_length
self.batch_size = batch_size
self.model_path = model_path
device = self.convert_device(device)
top_k = top_k if top_k else 5
if silence:
            # Transformers throws a warning regarding weight initialization. It is expected.
orig_log_level = logging.getLogger('transformers.' + 'modeling_utils').getEffectiveLevel()
logging.getLogger('transformers.' + 'modeling_utils').setLevel(logging.ERROR)
self.model = pipeline("fill-mask", model=model_path, device=device, top_k=top_k)
logging.getLogger('transformers.' + 'modeling_utils').setLevel(orig_log_level)
else:
self.model = pipeline("fill-mask", model=model_path, device=device, top_k=top_k)
def to(self, device):
self.model.model.to(device)
def get_device(self):
return str(self.model.device)
def get_tokenizer(self):
return self.model.tokenizer
def get_model(self):
return self.model.model
def get_max_num_token(self):
return self.model.model.config.max_position_embeddings - 2 * 5
def is_skip_candidate(self, candidate):
return candidate.startswith(self.get_subword_prefix())
def token2id(self, token):
        # Issue 181: TokenizerFast has convert_tokens_to_ids but not _convert_token_to_id
        if 'TokenizerFast' in self.model.tokenizer.__class__.__name__:
# New transformers API
return self.model.tokenizer.convert_tokens_to_ids(token)
else:
# Old transformers API
return self.model.tokenizer._convert_token_to_id(token)
def id2token(self, _id):
return self.model.tokenizer._convert_id_to_token(_id)
def predict(self, texts, target_words=None, n=1):
results = []
predict_results = []
with torch.no_grad():
for i in range(0, len(texts), self.batch_size):
predict_result = self.model(texts[i:i+self.batch_size], num_workers=1)
if isinstance(predict_result, list) and len(predict_result) > 0:
if isinstance(predict_result[0], list):
predict_results.extend(predict_result)
else:
predict_results.extend([predict_result])
for result in predict_results:
temp_results = []
for r in result:
token = r['token_str']
if self.model_type in ['bert'] and token.startswith('##'):
continue
                # roberta/bart subwords come without a leading space rather than the usual subword prefix
if self.model_type in ['roberta', 'bart'] and not token.startswith(' '):
continue
temp_results.append(token)
results.append(temp_results)
return results
| makcedward/nlpaug | nlpaug/model/lang_models/fill_mask_transformers.py | Python | mit | 3,614 |
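# A minimal usage sketch for the wrapper above (assumption: torch and
# transformers are installed and the default bert-base-uncased weights can be
# downloaded; the sample sentence is arbitrary):
from nlpaug.model.lang_models.fill_mask_transformers import FmTransformers

fm = FmTransformers(model_path='bert-base-uncased', model_type='bert',
                    top_k=5, device='cpu')
candidates = fm.predict(['The quick brown [MASK] jumps over the lazy dog.'])
print(candidates)  # one list of candidate tokens per masked input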
import unittest
import os
import copy
import scipy
import SloppyCell.Utility as Utility
from SloppyCell.ReactionNetworks import *
import SloppyCell.daskr
from SloppyCell.daskr import daeint
# Load the fast reaction example from the SBML semantic test suite.
# To avoid extra dependencies on libsbml, we use versions built by SloppyCell.
from AlgTestNets import algebraic_net, algebraic_net_manual
tlist_algebraic_net = scipy.array([0] + [0.8*x for x in range(1, 51)])
class test_crossReferences(unittest.TestCase):
# Introducing two helper functions for the integration test, which is
# repeated a few times below.
def integration_tests(self, net):
""" integrates the network to timepoints with known data which we test """
traj = Dynamics.integrate(net, tlist_algebraic_net)
self.data_test_cases(traj)
def data_test_cases(self, traj):
""" do the actual data comparison """
self.assertAlmostEqual(traj.get_var_val('X0',4.8),
0.618783392, 5)
self.assertAlmostEqual(traj.get_var_val('X1',21.6),
0.653837775, 5)
self.assertAlmostEqual(traj.get_var_val('T', 29.6),
0.138253942, 5)
self.assertAlmostEqual(traj.get_var_val('S1', 40.0),
0.018207409, 5)
self.assertAlmostEqual(traj.get_var_val('S2', 16.8),
0.210750878, 5)
def test_manual_cross_refs(self):
""" Test that a network with manual call to makeCrossReferences gets \
correct reults """
net = algebraic_net_manual.copy()
net._makeCrossReferences()
self.integration_tests(net)
def test_manual_without_makeCrossReferences(self):
""" Test that after setting _manualCrossReferences_flag to True the \
compilation will fail if makeCrossReferences is NOT called """
net = algebraic_net_manual.copy()
self.assertRaises(AttributeError, Dynamics.integrate, net, tlist_algebraic_net)
def test_manual_without_makeCrossReferences_2(self):
""" Test that after setting _manualCrossReferences_flag to True the \
compilation will fail if makeCrossReferences is NOT called. Also test that after \
makeCrossReferences() is subsequently called the integration works."""
net = algebraic_net_manual.copy()
self.assertRaises(AttributeError, Dynamics.integrate, net, tlist_algebraic_net)
net._makeCrossReferences()
self.integration_tests(net)
def test_manual_off(self):
""" Test that after setting _manualCrossReferences_flag to True and then back \
integration still works """
net = algebraic_net.copy()
self.assertEqual(net._manualCrossReferences_flag, False)
net._manualCrossReferences(flag=True)
self.assertEqual(net._manualCrossReferences_flag, True)
net._manualCrossReferences(flag=False)
self.assertEqual(net._manualCrossReferences_flag, False)
self.integration_tests(net)
def test_manual_call_makeCrossReferences(self):
""" Test that for a network where cross references are manual, once \
_makeCrossReferences is called then it will be automatic."""
net = algebraic_net_manual.copy()
self.assertEqual(algebraic_net_manual._manualCrossReferences_flag, True)
self.assertEqual(net._manualCrossReferences_flag, True)
net._makeCrossReferences()
self.assertEqual(len(net.GetParameters()), 3)
self.assertEqual(net._manualCrossReferences_flag, False)
# Try adding a parameter to make sure the cross references are made automatically.
net.addParameter('k_new', 1e-3)
self.assertEqual(len(net.GetParameters()), 4)
self.integration_tests(net)
################################################################################
suite = unittest.makeSuite(test_crossReferences)
if __name__ == '__main__':
unittest.main()
| GutenkunstLab/SloppyCell | test/test_crossReferences.py | Python | bsd-3-clause | 4,044 |
# Copyright (c) 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from oslo_log import log as logging
from webob import exc
from murano.common.i18n import _, _LI
from murano.db import models
from murano.db.services import sessions
from murano.db import session as db_session
from murano.services import states
LOG = logging.getLogger(__name__)
def check_env(request, environment_id):
unit = db_session.get_session()
environment = unit.query(models.Environment).get(environment_id)
if environment is None:
msg = _('Environment with id {0}'
' not found').format(environment_id)
LOG.warning(msg)
raise exc.HTTPNotFound(explanation=msg)
if hasattr(request, 'context'):
if environment.tenant_id != request.context.tenant:
msg = _('User is not authorized to access'
' these tenant resources')
LOG.warning(msg)
raise exc.HTTPForbidden(explanation=msg)
return environment
def check_session(request, environment_id, session, session_id):
"""Validate, that a session is ok."""
if session is None:
msg = _('Session <SessionId {id}> is not found').format(id=session_id)
LOG.error(msg)
raise exc.HTTPNotFound(explanation=msg)
if session.environment_id != environment_id:
msg = _('Session <SessionId {session_id}> is not tied '
'with Environment <EnvId {environment_id}>').format(
session_id=session_id,
environment_id=environment_id)
LOG.error(msg)
raise exc.HTTPNotFound(explanation=msg)
check_env(request, environment_id)
def verify_env(func):
@functools.wraps(func)
def __inner(self, request, environment_id, *args, **kwargs):
check_env(request, environment_id)
return func(self, request, environment_id, *args, **kwargs)
return __inner
def verify_env_template(func):
@functools.wraps(func)
def __inner(self, request, env_template_id, *args, **kwargs):
unit = db_session.get_session()
template = unit.query(models.EnvironmentTemplate).get(env_template_id)
if template is None:
LOG.info(_LI("Environment Template with id '{0}' not found").
format(env_template_id))
raise exc.HTTPNotFound()
if hasattr(request, 'context'):
if template.tenant_id != request.context.tenant:
LOG.info(_LI('User is not authorized to access '
'this tenant resources'))
raise exc.HTTPUnauthorized()
return func(self, request, env_template_id, *args, **kwargs)
return __inner
def verify_session(func):
@functools.wraps(func)
def __inner(self, request, *args, **kwargs):
if hasattr(request, 'context') and not request.context.session:
LOG.info(_LI('Session is required for this call'))
raise exc.HTTPForbidden()
session_id = request.context.session
unit = db_session.get_session()
session = unit.query(models.Session).get(session_id)
if session is None:
LOG.info(_LI('Session <SessionId {0}> '
'is not found').format(session_id))
raise exc.HTTPForbidden()
if not sessions.SessionServices.validate(session):
LOG.info(_LI('Session <SessionId {0}> '
'is invalid').format(session_id))
raise exc.HTTPForbidden()
if session.state == states.SessionState.DEPLOYING:
LOG.info(_LI('Session <SessionId {0}> is already in '
'deployment state').format(session_id))
raise exc.HTTPForbidden()
return func(self, request, *args, **kwargs)
return __inner
| sajuptpm/murano | murano/utils.py | Python | apache-2.0 | 4,377 |
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Middleware provided and used by Horizon.
"""
import json
import logging
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.views import redirect_to_login
from django.contrib import messages as django_messages
from django import http
from django import shortcuts
from django.utils.encoding import iri_to_uri
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from openstack_auth import views as auth_views
from horizon import exceptions
from horizon.utils import functions as utils
LOG = logging.getLogger(__name__)
class HorizonMiddleware(object):
"""The main Horizon middleware class. Required for use of Horizon."""
logout_reason = None
def _logout(self, request, login_url=None, message=None, status='success'):
"""Logout a user and display a logout message."""
response = auth_views.logout(request, login_url)
if message is not None:
self.logout_reason = message
utils.add_logout_reason(request, response, message, status)
return response
def process_request(self, request):
"""Adds data necessary for Horizon to function to the request."""
request.horizon = {'dashboard': None,
'panel': None,
'async_messages': []}
if not hasattr(request, "user") or not request.user.is_authenticated():
# proceed no further if the current request is already known
# not to be authenticated
# it is CRITICAL to perform this check as early as possible
# to avoid creating too many sessions
return None
if request.is_ajax():
# if the request is Ajax we do not want to proceed, as clients can
# 1) create pages with constant polling, which can create race
# conditions when a page navigation occurs
# 2) might leave a user seemingly left logged in forever
# 3) thrashes db backed session engines with tons of changes
return None
# If we use cookie-based sessions, check that the cookie size does not
# reach the max size accepted by common web browsers.
if (
settings.SESSION_ENGINE ==
'django.contrib.sessions.backends.signed_cookies'
):
max_cookie_size = getattr(
settings, 'SESSION_COOKIE_MAX_SIZE', None)
session_cookie_name = getattr(
settings, 'SESSION_COOKIE_NAME', None)
session_key = request.COOKIES.get(session_cookie_name)
if max_cookie_size is not None and session_key is not None:
cookie_size = sum((
len(key) + len(value)
for key, value in request.COOKIES.items()
))
if cookie_size >= max_cookie_size:
LOG.error(
'Total Cookie size for user_id: %(user_id)s is '
'%(cookie_size)sB >= %(max_cookie_size)sB. '
'You need to configure file-based or database-backed '
'sessions instead of cookie-based sessions: '
'http://docs.openstack.org/developer/horizon/topics/'
'deployment.html#session-storage',
{
'user_id': request.session.get(
'user_id', 'Unknown'),
'cookie_size': cookie_size,
'max_cookie_size': max_cookie_size,
}
)
tz = request.session.get('django_timezone')
if tz:
timezone.activate(tz)
def process_exception(self, request, exception):
"""Catches internal Horizon exception classes such as NotAuthorized,
NotFound and Http302 and handles them gracefully.
"""
if isinstance(exception, (exceptions.NotAuthorized,
exceptions.NotAuthenticated)):
auth_url = settings.LOGIN_URL
next_url = iri_to_uri(request.get_full_path())
if next_url != auth_url:
field_name = REDIRECT_FIELD_NAME
else:
field_name = None
login_url = request.build_absolute_uri(auth_url)
response = redirect_to_login(next_url, login_url=login_url,
redirect_field_name=field_name)
if isinstance(exception, exceptions.NotAuthorized):
logout_reason = _("Unauthorized. Please try logging in again.")
utils.add_logout_reason(request, response, logout_reason,
'error')
# delete messages, created in get_data() method
# since we are going to redirect user to the login page
response.delete_cookie('messages')
if request.is_ajax():
response_401 = http.HttpResponse(status=401)
response_401['X-Horizon-Location'] = response['location']
return response_401
return response
# If an internal "NotFound" error gets this far, return a real 404.
if isinstance(exception, exceptions.NotFound):
raise http.Http404(exception)
if isinstance(exception, exceptions.Http302):
# TODO(gabriel): Find a way to display an appropriate message to
# the user *on* the login form...
return shortcuts.redirect(exception.location)
@staticmethod
def copy_headers(src, dst, headers):
for header in headers:
dst[header] = src[header]
def process_response(self, request, response):
"""Convert HttpResponseRedirect to HttpResponse if request is via ajax
to allow ajax request to redirect url
"""
if request.is_ajax() and hasattr(request, 'horizon'):
queued_msgs = request.horizon['async_messages']
if type(response) == http.HttpResponseRedirect:
# Drop our messages back into the session as per usual so they
                # don't disappear during the redirect. Note that we explicitly
# use django's messages methods here.
for tag, message, extra_tags in queued_msgs:
getattr(django_messages, tag)(request, message, extra_tags)
if response['location'].startswith(settings.LOGOUT_URL):
redirect_response = http.HttpResponse(status=401)
# This header is used for handling the logout in JS
redirect_response['logout'] = True
if self.logout_reason is not None:
utils.add_logout_reason(
request, redirect_response, self.logout_reason,
'error')
else:
redirect_response = http.HttpResponse()
# Use a set while checking if we want a cookie's attributes
# copied
cookie_keys = {'max_age', 'expires', 'path', 'domain',
'secure', 'httponly', 'logout_reason'}
# Copy cookies from HttpResponseRedirect towards HttpResponse
for cookie_name, cookie in response.cookies.items():
cookie_kwargs = dict((
(key, value) for key, value in cookie.items()
if key in cookie_keys and value
))
redirect_response.set_cookie(
cookie_name, cookie.value, **cookie_kwargs)
redirect_response['X-Horizon-Location'] = response['location']
upload_url_key = 'X-File-Upload-URL'
if upload_url_key in response:
self.copy_headers(response, redirect_response,
(upload_url_key, 'X-Auth-Token'))
return redirect_response
if queued_msgs:
# TODO(gabriel): When we have an async connection to the
# client (e.g. websockets) this should be pushed to the
# socket queue rather than being sent via a header.
# The header method has notable drawbacks (length limits,
# etc.) and is not meant as a long-term solution.
response['X-Horizon-Messages'] = json.dumps(queued_msgs)
return response
| kogotko/carburetor | horizon/middleware/base.py | Python | apache-2.0 | 9,397 |
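# A minimal settings sketch (assumption: a pre-Django-1.10 MIDDLEWARE_CLASSES
# tuple, which matches the process_request/process_response hooks used by the
# class above; session and auth middleware must be listed before it):
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'horizon.middleware.base.HorizonMiddleware',  # path per this module's location
)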
# Authors: Chris Holdgraf <choldgraf@gmail.com>
#
# License: BSD (3-clause)
import os.path as op
import pytest
import numpy as np
from numpy import einsum
from numpy.fft import rfft, irfft
from numpy.testing import assert_array_equal, assert_allclose, assert_equal
from mne.utils import requires_sklearn
from mne.decoding import ReceptiveField, TimeDelayingRidge
from mne.decoding.receptive_field import (_delay_time_series, _SCORERS,
_times_to_delays, _delays_to_slice)
from mne.decoding.time_delaying_ridge import (_compute_reg_neighbors,
_compute_corrs)
data_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(data_dir, 'test_raw.fif')
event_name = op.join(data_dir, 'test-eve.fif')
tmin, tmax = -0.1, 0.5
event_id = dict(aud_l=1, vis_l=3)
# Loading raw data
n_jobs_test = (1, 'cuda')
def test_compute_reg_neighbors():
"""Test fast calculation of laplacian regularizer."""
for reg_type in (
('ridge', 'ridge'),
('ridge', 'laplacian'),
('laplacian', 'ridge'),
('laplacian', 'laplacian')):
for n_ch_x, n_delays in (
(1, 1), (1, 2), (2, 1), (1, 3), (3, 1), (1, 4), (4, 1),
(2, 2), (2, 3), (3, 2), (3, 3),
(2, 4), (4, 2), (3, 4), (4, 3), (4, 4),
(5, 4), (4, 5), (5, 5),
(20, 9), (9, 20)):
for normed in (True, False):
reg_direct = _compute_reg_neighbors(
n_ch_x, n_delays, reg_type, 'direct', normed=normed)
reg_csgraph = _compute_reg_neighbors(
n_ch_x, n_delays, reg_type, 'csgraph', normed=normed)
assert_allclose(
reg_direct, reg_csgraph, atol=1e-7,
err_msg='%s: %s' % (reg_type, (n_ch_x, n_delays)))
@requires_sklearn
def test_rank_deficiency():
"""Test signals that are rank deficient."""
# See GH#4253
from sklearn.linear_model import Ridge
N = 256
fs = 1.
tmin, tmax = -50, 100
reg = 0.1
rng = np.random.RandomState(0)
eeg = rng.randn(N, 1)
eeg *= 100
eeg = rfft(eeg, axis=0)
eeg[N // 4:] = 0 # rank-deficient lowpass
eeg = irfft(eeg, axis=0)
win = np.hanning(N // 8)
win /= win.mean()
y = np.apply_along_axis(np.convolve, 0, eeg, win, mode='same')
y += rng.randn(*y.shape) * 100
for est in (Ridge(reg), reg):
rf = ReceptiveField(tmin, tmax, fs, estimator=est, patterns=True)
rf.fit(eeg, y)
pred = rf.predict(eeg)
assert_equal(y.shape, pred.shape)
corr = np.corrcoef(y.ravel(), pred.ravel())[0, 1]
assert corr > 0.995
def test_time_delay():
"""Test that time-delaying w/ times and samples works properly."""
# Explicit delays + sfreq
X = np.random.RandomState(0).randn(1000, 2)
assert (X == 0).sum() == 0 # need this for later
test_tlims = [
((1, 2), 1),
((1, 1), 1),
((0, 2), 1),
((0, 1), 1),
((0, 0), 1),
((-1, 2), 1),
((-1, 1), 1),
((-1, 0), 1),
((-1, -1), 1),
((-2, 2), 1),
((-2, 1), 1),
((-2, 0), 1),
((-2, -1), 1),
((-2, -1), 1),
((0, .2), 10),
((-.1, .1), 10)]
for (tmin, tmax), isfreq in test_tlims:
# sfreq must be int/float
with pytest.raises(TypeError, match='`sfreq` must be an instance of'):
_delay_time_series(X, tmin, tmax, sfreq=[1])
# Delays must be int/float
with pytest.raises(TypeError, match='.*complex.*'):
_delay_time_series(X, np.complex128(tmin), tmax, 1)
# Make sure swapaxes works
start, stop = int(round(tmin * isfreq)), int(round(tmax * isfreq)) + 1
n_delays = stop - start
X_delayed = _delay_time_series(X, tmin, tmax, isfreq)
assert_equal(X_delayed.shape, (1000, 2, n_delays))
# Make sure delay slice is correct
delays = _times_to_delays(tmin, tmax, isfreq)
assert_array_equal(delays, np.arange(start, stop))
keep = _delays_to_slice(delays)
expected = np.where((X_delayed != 0).all(-1).all(-1))[0]
got = np.arange(len(X_delayed))[keep]
assert_array_equal(got, expected)
assert X_delayed[keep].shape[-1] > 0
assert (X_delayed[keep] == 0).sum() == 0
del_zero = int(round(-tmin * isfreq))
for ii in range(-2, 3):
idx = del_zero + ii
err_msg = '[%s,%s] (%s): %s %s' % (tmin, tmax, isfreq, ii, idx)
if 0 <= idx < X_delayed.shape[-1]:
if ii == 0:
assert_array_equal(X_delayed[:, :, idx], X,
err_msg=err_msg)
elif ii < 0: # negative delay
assert_array_equal(X_delayed[:ii, :, idx], X[-ii:, :],
err_msg=err_msg)
assert_array_equal(X_delayed[ii:, :, idx], 0.)
else:
assert_array_equal(X_delayed[ii:, :, idx], X[:-ii, :],
err_msg=err_msg)
assert_array_equal(X_delayed[:ii, :, idx], 0.)
@pytest.mark.parametrize('n_jobs', n_jobs_test)
@requires_sklearn
def test_receptive_field_basic(n_jobs):
"""Test model prep and fitting."""
from sklearn.linear_model import Ridge
# Make sure estimator pulling works
mod = Ridge()
rng = np.random.RandomState(1337)
# Test the receptive field model
# Define parameters for the model and simulate inputs + weights
tmin, tmax = -10., 0
n_feats = 3
rng = np.random.RandomState(0)
X = rng.randn(10000, n_feats)
w = rng.randn(int((tmax - tmin) + 1) * n_feats)
# Delay inputs and cut off first 4 values since they'll be cut in the fit
X_del = np.concatenate(
_delay_time_series(X, tmin, tmax, 1.).transpose(2, 0, 1), axis=1)
y = np.dot(X_del, w)
# Fit the model and test values
feature_names = ['feature_%i' % ii for ii in [0, 1, 2]]
rf = ReceptiveField(tmin, tmax, 1, feature_names, estimator=mod,
patterns=True)
rf.fit(X, y)
assert_array_equal(rf.delays_, np.arange(tmin, tmax + 1))
y_pred = rf.predict(X)
assert_allclose(y[rf.valid_samples_], y_pred[rf.valid_samples_], atol=1e-2)
scores = rf.score(X, y)
assert scores > .99
assert_allclose(rf.coef_.T.ravel(), w, atol=1e-3)
# Make sure different input shapes work
rf.fit(X[:, np.newaxis:], y[:, np.newaxis])
rf.fit(X, y[:, np.newaxis])
with pytest.raises(ValueError, match='If X has 3 .* y must have 2 or 3'):
rf.fit(X[..., np.newaxis], y)
with pytest.raises(ValueError, match='X must be shape'):
rf.fit(X[:, 0], y)
with pytest.raises(ValueError, match='X and y do not have the same n_epo'):
rf.fit(X[:, np.newaxis], np.tile(y[:, np.newaxis, np.newaxis],
[1, 2, 1]))
with pytest.raises(ValueError, match='X and y do not have the same n_tim'):
rf.fit(X, y[:-2])
with pytest.raises(ValueError, match='n_features in X does not match'):
rf.fit(X[:, :1], y)
# auto-naming features
feature_names = ['feature_%s' % ii for ii in [0, 1, 2]]
rf = ReceptiveField(tmin, tmax, 1, estimator=mod,
feature_names=feature_names)
assert_equal(rf.feature_names, feature_names)
rf = ReceptiveField(tmin, tmax, 1, estimator=mod)
rf.fit(X, y)
assert_equal(rf.feature_names, None)
# Float becomes ridge
rf = ReceptiveField(tmin, tmax, 1, ['one', 'two', 'three'], estimator=0)
str(rf) # repr works before fit
rf.fit(X, y)
assert isinstance(rf.estimator_, TimeDelayingRidge)
str(rf) # repr works after fit
rf = ReceptiveField(tmin, tmax, 1, ['one'], estimator=0)
rf.fit(X[:, [0]], y)
str(rf) # repr with one feature
# Should only accept estimators or floats
with pytest.raises(ValueError, match='`estimator` must be a float or'):
ReceptiveField(tmin, tmax, 1, estimator='foo').fit(X, y)
with pytest.raises(ValueError, match='`estimator` must be a float or'):
ReceptiveField(tmin, tmax, 1, estimator=np.array([1, 2, 3])).fit(X, y)
with pytest.raises(ValueError, match='tmin .* must be at most tmax'):
ReceptiveField(5, 4, 1).fit(X, y)
# scorers
for key, val in _SCORERS.items():
rf = ReceptiveField(tmin, tmax, 1, ['one'],
estimator=0, scoring=key, patterns=True)
rf.fit(X[:, [0]], y)
y_pred = rf.predict(X[:, [0]]).T.ravel()[:, np.newaxis]
assert_allclose(val(y[:, np.newaxis], y_pred,
multioutput='raw_values'),
rf.score(X[:, [0]], y), rtol=1e-2)
with pytest.raises(ValueError, match='inputs must be shape'):
_SCORERS['corrcoef'](y.ravel(), y_pred, multioutput='raw_values')
# Need correct scorers
with pytest.raises(ValueError, match='scoring must be one of'):
ReceptiveField(tmin, tmax, 1., scoring='foo').fit(X, y)
@pytest.mark.parametrize('n_jobs', n_jobs_test)
def test_time_delaying_fast_calc(n_jobs):
"""Test time delaying and fast calculations."""
X = np.array([[1, 2, 3], [5, 7, 11]]).T
# all negative
smin, smax = 1, 2
X_del = _delay_time_series(X, smin, smax, 1.)
# (n_times, n_features, n_delays) -> (n_times, n_features * n_delays)
X_del.shape = (X.shape[0], -1)
expected = np.array([[0, 1, 2], [0, 0, 1], [0, 5, 7], [0, 0, 5]]).T
assert_allclose(X_del, expected)
Xt_X = np.dot(X_del.T, X_del)
expected = [[5, 2, 19, 10], [2, 1, 7, 5], [19, 7, 74, 35], [10, 5, 35, 25]]
assert_allclose(Xt_X, expected)
x_xt = _compute_corrs(X, np.zeros((X.shape[0], 1)), smin, smax + 1)[0]
assert_allclose(x_xt, expected)
# all positive
smin, smax = -2, -1
X_del = _delay_time_series(X, smin, smax, 1.)
X_del.shape = (X.shape[0], -1)
expected = np.array([[3, 0, 0], [2, 3, 0], [11, 0, 0], [7, 11, 0]]).T
assert_allclose(X_del, expected)
Xt_X = np.dot(X_del.T, X_del)
expected = [[9, 6, 33, 21], [6, 13, 22, 47],
[33, 22, 121, 77], [21, 47, 77, 170]]
assert_allclose(Xt_X, expected)
x_xt = _compute_corrs(X, np.zeros((X.shape[0], 1)), smin, smax + 1)[0]
assert_allclose(x_xt, expected)
# both sides
smin, smax = -1, 1
X_del = _delay_time_series(X, smin, smax, 1.)
X_del.shape = (X.shape[0], -1)
expected = np.array([[2, 3, 0], [1, 2, 3], [0, 1, 2],
[7, 11, 0], [5, 7, 11], [0, 5, 7]]).T
assert_allclose(X_del, expected)
Xt_X = np.dot(X_del.T, X_del)
expected = [[13, 8, 3, 47, 31, 15],
[8, 14, 8, 29, 52, 31],
[3, 8, 5, 11, 29, 19],
[47, 29, 11, 170, 112, 55],
[31, 52, 29, 112, 195, 112],
[15, 31, 19, 55, 112, 74]]
assert_allclose(Xt_X, expected)
x_xt = _compute_corrs(X, np.zeros((X.shape[0], 1)), smin, smax + 1)[0]
assert_allclose(x_xt, expected)
# slightly harder to get the non-Toeplitz correction correct
X = np.array([[1, 2, 3, 5]]).T
smin, smax = 0, 3
X_del = _delay_time_series(X, smin, smax, 1.)
X_del.shape = (X.shape[0], -1)
expected = np.array([[1, 2, 3, 5], [0, 1, 2, 3],
[0, 0, 1, 2], [0, 0, 0, 1]]).T
assert_allclose(X_del, expected)
Xt_X = np.dot(X_del.T, X_del)
expected = [[39, 23, 13, 5], [23, 14, 8, 3], [13, 8, 5, 2], [5, 3, 2, 1]]
assert_allclose(Xt_X, expected)
x_xt = _compute_corrs(X, np.zeros((X.shape[0], 1)), smin, smax + 1)[0]
assert_allclose(x_xt, expected)
# even worse
X = np.array([[1, 2, 3], [5, 7, 11]]).T
smin, smax = 0, 2
X_del = _delay_time_series(X, smin, smax, 1.)
X_del.shape = (X.shape[0], -1)
expected = np.array([[1, 2, 3], [0, 1, 2], [0, 0, 1],
[5, 7, 11], [0, 5, 7], [0, 0, 5]]).T
assert_allclose(X_del, expected)
Xt_X = np.dot(X_del.T, X_del)
expected = np.array([[14, 8, 3, 52, 31, 15],
[8, 5, 2, 29, 19, 10],
[3, 2, 1, 11, 7, 5],
[52, 29, 11, 195, 112, 55],
[31, 19, 7, 112, 74, 35],
[15, 10, 5, 55, 35, 25]])
assert_allclose(Xt_X, expected)
x_xt = _compute_corrs(X, np.zeros((X.shape[0], 1)), smin, smax + 1)[0]
assert_allclose(x_xt, expected)
# And a bunch of random ones for good measure
rng = np.random.RandomState(0)
X = rng.randn(25, 3)
y = np.empty((25, 2))
vals = (0, -1, 1, -2, 2, -11, 11)
for smax in vals:
for smin in vals:
if smin > smax:
continue
for ii in range(X.shape[1]):
kernel = rng.randn(smax - smin + 1)
kernel -= np.mean(kernel)
y[:, ii % y.shape[-1]] = np.convolve(X[:, ii], kernel, 'same')
x_xt, x_yt, n_ch_x, _, _ = _compute_corrs(X, y, smin, smax + 1)
X_del = _delay_time_series(X, smin, smax, 1., fill_mean=False)
x_yt_true = einsum('tfd,to->ofd', X_del, y)
x_yt_true = np.reshape(x_yt_true, (x_yt_true.shape[0], -1)).T
assert_allclose(x_yt, x_yt_true, atol=1e-7, err_msg=(smin, smax))
X_del.shape = (X.shape[0], -1)
x_xt_true = np.dot(X_del.T, X_del).T
assert_allclose(x_xt, x_xt_true, atol=1e-7, err_msg=(smin, smax))
@pytest.mark.parametrize('n_jobs', n_jobs_test)
@requires_sklearn
def test_receptive_field_1d(n_jobs):
"""Test that the fast solving works like Ridge."""
from sklearn.linear_model import Ridge
rng = np.random.RandomState(0)
x = rng.randn(500, 1)
for delay in range(-2, 3):
y = np.zeros(500)
slims = [(-2, 4)]
if delay == 0:
y[:] = x[:, 0]
elif delay < 0:
y[:delay] = x[-delay:, 0]
slims += [(-4, -1)]
else:
y[delay:] = x[:-delay, 0]
slims += [(1, 2)]
for ndim in (1, 2):
y.shape = (y.shape[0],) + (1,) * (ndim - 1)
for slim in slims:
smin, smax = slim
lap = TimeDelayingRidge(smin, smax, 1., 0.1, 'laplacian',
fit_intercept=False, n_jobs=n_jobs)
for estimator in (Ridge(alpha=0.), Ridge(alpha=0.1), 0., 0.1,
lap):
for offset in (-100, 0, 100):
model = ReceptiveField(smin, smax, 1.,
estimator=estimator,
n_jobs=n_jobs)
use_x = x + offset
model.fit(use_x, y)
if estimator is lap:
continue # these checks are too stringent
assert_allclose(model.estimator_.intercept_, -offset,
atol=1e-1)
assert_array_equal(model.delays_,
np.arange(smin, smax + 1))
expected = (model.delays_ == delay).astype(float)
expected = expected[np.newaxis] # features
if y.ndim == 2:
expected = expected[np.newaxis] # outputs
assert_equal(model.coef_.ndim, ndim + 1)
assert_allclose(model.coef_, expected, atol=1e-3)
start = model.valid_samples_.start or 0
stop = len(use_x) - (model.valid_samples_.stop or 0)
assert stop - start >= 495
assert_allclose(
model.predict(use_x)[model.valid_samples_],
y[model.valid_samples_], atol=1e-2)
score = np.mean(model.score(use_x, y))
assert score > 0.9999
@pytest.mark.parametrize('n_jobs', n_jobs_test)
@requires_sklearn
def test_receptive_field_nd(n_jobs):
"""Test multidimensional support."""
from sklearn.linear_model import Ridge
# multidimensional
rng = np.random.RandomState(3)
x = rng.randn(1000, 3)
y = np.zeros((1000, 2))
smin, smax = 0, 5
# This is a weird assignment, but it's just a way to distribute some
# unique values at various delays, and "expected" explains how they
# should appear in the resulting RF
for ii in range(1, 5):
y[ii:, ii % 2] += (-1) ** ii * ii * x[:-ii, ii % 3]
y -= np.mean(y, axis=0)
x -= np.mean(x, axis=0)
x_off = x + 1e3
expected = [
[[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 4, 0],
[0, 0, 2, 0, 0, 0]],
[[0, 0, 0, -3, 0, 0],
[0, -1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]],
]
tdr_l = TimeDelayingRidge(smin, smax, 1., 0.1, 'laplacian', n_jobs=n_jobs)
tdr_nc = TimeDelayingRidge(smin, smax, 1., 0.1, n_jobs=n_jobs,
edge_correction=False)
for estimator, atol in zip((Ridge(alpha=0.), 0., 0.01, tdr_l, tdr_nc),
(1e-3, 1e-3, 1e-3, 5e-3, 5e-2)):
model = ReceptiveField(smin, smax, 1.,
estimator=estimator)
model.fit(x, y)
assert_array_equal(model.delays_,
np.arange(smin, smax + 1))
assert_allclose(model.coef_, expected, atol=atol)
tdr = TimeDelayingRidge(smin, smax, 1., 0.01, reg_type='foo',
n_jobs=n_jobs)
model = ReceptiveField(smin, smax, 1., estimator=tdr)
with pytest.raises(ValueError, match='reg_type entries must be one of'):
model.fit(x, y)
tdr = TimeDelayingRidge(smin, smax, 1., 0.01, reg_type=['laplacian'],
n_jobs=n_jobs)
model = ReceptiveField(smin, smax, 1., estimator=tdr)
with pytest.raises(ValueError, match='reg_type must have two elements'):
model.fit(x, y)
model = ReceptiveField(smin, smax, 1, estimator=tdr, fit_intercept=False)
with pytest.raises(ValueError, match='fit_intercept'):
model.fit(x, y)
# Now check the intercept_
tdr = TimeDelayingRidge(smin, smax, 1., 0., n_jobs=n_jobs)
tdr_no = TimeDelayingRidge(smin, smax, 1., 0., fit_intercept=False,
n_jobs=n_jobs)
for estimator in (Ridge(alpha=0.), tdr,
Ridge(alpha=0., fit_intercept=False), tdr_no):
# first with no intercept in the data
model = ReceptiveField(smin, smax, 1., estimator=estimator)
model.fit(x, y)
assert_allclose(model.estimator_.intercept_, 0., atol=1e-7,
err_msg=repr(estimator))
assert_allclose(model.coef_, expected, atol=1e-3,
err_msg=repr(estimator))
y_pred = model.predict(x)
assert_allclose(y_pred[model.valid_samples_],
y[model.valid_samples_],
atol=1e-2, err_msg=repr(estimator))
score = np.mean(model.score(x, y))
assert score > 0.9999
# now with an intercept in the data
model.fit(x_off, y)
if estimator.fit_intercept:
val = [-6000, 4000]
itol = 0.5
ctol = 5e-4
else:
val = itol = 0.
ctol = 2.
assert_allclose(model.estimator_.intercept_, val, atol=itol,
err_msg=repr(estimator))
assert_allclose(model.coef_, expected, atol=ctol, rtol=ctol,
err_msg=repr(estimator))
if estimator.fit_intercept:
ptol = 1e-2
stol = 0.999999
else:
ptol = 10
stol = 0.6
y_pred = model.predict(x_off)[model.valid_samples_]
assert_allclose(y_pred, y[model.valid_samples_],
atol=ptol, err_msg=repr(estimator))
score = np.mean(model.score(x_off, y))
assert score > stol, estimator
model = ReceptiveField(smin, smax, 1., fit_intercept=False)
model.fit(x_off, y)
assert_allclose(model.estimator_.intercept_, 0., atol=1e-7)
score = np.mean(model.score(x_off, y))
assert score > 0.6
def _make_data(n_feats, n_targets, n_samples, tmin, tmax):
rng = np.random.RandomState(0)
X = rng.randn(n_samples, n_feats)
w = rng.randn(int((tmax - tmin) + 1) * n_feats, n_targets)
# Delay inputs
X_del = np.concatenate(
_delay_time_series(X, tmin, tmax, 1.).transpose(2, 0, 1), axis=1)
y = np.dot(X_del, w)
return X, y
@requires_sklearn
def test_inverse_coef():
"""Test inverse coefficients computation."""
from sklearn.linear_model import Ridge
tmin, tmax = 0., 10.
n_feats, n_targets, n_samples = 3, 2, 1000
n_delays = int((tmax - tmin) + 1)
# Check coefficient dims, for all estimator types
X, y = _make_data(n_feats, n_targets, n_samples, tmin, tmax)
tdr = TimeDelayingRidge(tmin, tmax, 1., 0.1, 'laplacian')
for estimator in (0., 0.01, Ridge(alpha=0.), tdr):
rf = ReceptiveField(tmin, tmax, 1., estimator=estimator,
patterns=True)
rf.fit(X, y)
inv_rf = ReceptiveField(tmin, tmax, 1., estimator=estimator,
patterns=True)
inv_rf.fit(y, X)
assert_array_equal(rf.coef_.shape, rf.patterns_.shape,
(n_targets, n_feats, n_delays))
assert_array_equal(inv_rf.coef_.shape, inv_rf.patterns_.shape,
(n_feats, n_targets, n_delays))
# we should have np.dot(patterns.T,coef) ~ np.eye(n)
c0 = rf.coef_.reshape(n_targets, n_feats * n_delays)
c1 = rf.patterns_.reshape(n_targets, n_feats * n_delays)
assert_allclose(np.dot(c0, c1.T), np.eye(c0.shape[0]), atol=0.2)
@requires_sklearn
def test_linalg_warning():
"""Test that warnings are issued when no regularization is applied."""
from sklearn.linear_model import Ridge
n_feats, n_targets, n_samples = 5, 60, 50
X, y = _make_data(n_feats, n_targets, n_samples, tmin, tmax)
for estimator in (0., Ridge(alpha=0.)):
rf = ReceptiveField(tmin, tmax, 1., estimator=estimator)
with pytest.warns((RuntimeWarning, UserWarning),
match='[Singular|scipy.linalg.solve]'):
rf.fit(y, X)
| rkmaddox/mne-python | mne/decoding/tests/test_receptive_field.py | Python | bsd-3-clause | 22,664 |
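# A minimal fitting sketch distilled from test_receptive_field_basic above
# (assumption: scikit-learn is installed, since a float estimator is mapped to
# ridge regression via TimeDelayingRidge; the simulated data are arbitrary):
import numpy as np
from mne.decoding import ReceptiveField

rng = np.random.RandomState(0)
X = rng.randn(10000, 3)                                # (n_times, n_features) stimulus
y = np.convolve(X[:, 0], np.hanning(7), mode='same')   # target driven by feature 0
rf = ReceptiveField(tmin=-5., tmax=0., sfreq=1., estimator=0.1)
rf.fit(X, y)
print(rf.coef_.shape)  # (n_features, n_delays)
print(rf.score(X, y))  # score computed on the valid (non-edge) samples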
# -*- coding: utf-8 -*-
###############################################################################
#
# SegmentationForVisitorsTodayViaSearch
# Retrieves segmentation data for visitors today who arrived via a search.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class SegmentationForVisitorsTodayViaSearch(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the SegmentationForVisitorsTodayViaSearch Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(SegmentationForVisitorsTodayViaSearch, self).__init__(temboo_session, '/Library/Clicky/SegmentationForVisitorsTodayViaSearch')
def new_input_set(self):
return SegmentationForVisitorsTodayViaSearchInputSet()
def _make_result_set(self, result, path):
return SegmentationForVisitorsTodayViaSearchResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return SegmentationForVisitorsTodayViaSearchChoreographyExecution(session, exec_id, path)
class SegmentationForVisitorsTodayViaSearchInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the SegmentationForVisitorsTodayViaSearch
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_Limit(self, value):
"""
Set the value of the Limit input for this Choreo. ((optional, integer) The number of records you want to retrieve. Defaults to 30.)
"""
super(SegmentationForVisitorsTodayViaSearchInputSet, self)._set_input('Limit', value)
def set_Output(self, value):
"""
Set the value of the Output input for this Choreo. ((optional, string) What format you want the returned data to be in. Accepted values: xml, php, json, csv. Defaults to 'xml'.)
"""
super(SegmentationForVisitorsTodayViaSearchInputSet, self)._set_input('Output', value)
def set_SiteID(self, value):
"""
Set the value of the SiteID input for this Choreo. ((required, integer) Your request must include the site's ID that you want to access data from. Available from your site preferences page.)
"""
super(SegmentationForVisitorsTodayViaSearchInputSet, self)._set_input('SiteID', value)
def set_SiteKey(self, value):
"""
Set the value of the SiteKey input for this Choreo. ((required, string) The unique key assigned to you when you first register with Clicky. Available from your site preferences page.)
"""
super(SegmentationForVisitorsTodayViaSearchInputSet, self)._set_input('SiteKey', value)
def set_Type(self, value):
"""
Set the value of the Type input for this Choreo. ((optional, string) The type of data you want to retrieve. Defaults to "segmentation".)
"""
super(SegmentationForVisitorsTodayViaSearchInputSet, self)._set_input('Type', value)
class SegmentationForVisitorsTodayViaSearchResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the SegmentationForVisitorsTodayViaSearch Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Clicky formatted as specified in the Output parameter. Default is XML.)
"""
return self._output.get('Response', None)
class SegmentationForVisitorsTodayViaSearchChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return SegmentationForVisitorsTodayViaSearchResultSet(response, path)
| jordanemedlock/psychtruths | temboo/core/Library/Clicky/SegmentationForVisitorsTodayViaSearch.py | Python | apache-2.0 | 4,690 |
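# A minimal execution sketch for the Choreo above (assumptions: a valid Temboo
# account; TembooSession and execute_with_results follow the usual Temboo SDK
# pattern; the credential strings and site ID are placeholders):
from temboo.core.session import TembooSession
from temboo.core.Library.Clicky.SegmentationForVisitorsTodayViaSearch import (
    SegmentationForVisitorsTodayViaSearch)

session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
choreo = SegmentationForVisitorsTodayViaSearch(session)
inputs = choreo.new_input_set()
inputs.set_SiteID('12345')
inputs.set_SiteKey('your-clicky-site-key')
results = choreo.execute_with_results(inputs)
print(results.get_Response())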
# simpleSQL.py
#
# simple demo of using the parsing library to do simple-minded SQL parsing
# could be extended to include where clauses etc.
#
# Copyright (c) 2003, Paul McGuire
#
from pyparsing import Literal, CaselessLiteral, Word, Upcase, delimitedList, Optional, \
Combine, Group, alphas, nums, alphanums, ParseException, Forward, oneOf, quotedString, \
ZeroOrMore, restOfLine, Keyword
def test( str ):
print str,"->"
try:
tokens = simpleSQL.parseString( str )
print "tokens = ", tokens
print "tokens.columns =", tokens.columns
print "tokens.tables =", tokens.tables
print "tokens.where =", tokens.where
except ParseException, err:
print " "*err.loc + "^\n" + err.msg
print err
print
# define SQL tokens
selectStmt = Forward()
selectToken = Keyword("select", caseless=True)
fromToken = Keyword("from", caseless=True)
ident = Word( alphas, alphanums + "_$" ).setName("identifier")
columnName = Upcase( delimitedList( ident, ".", combine=True ) )
columnNameList = Group( delimitedList( columnName ) )
tableName = Upcase( delimitedList( ident, ".", combine=True ) )
tableNameList = Group( delimitedList( tableName ) )
whereExpression = Forward()
and_ = Keyword("and", caseless=True)
or_ = Keyword("or", caseless=True)
in_ = Keyword("in", caseless=True)
E = CaselessLiteral("E")
binop = oneOf("= != < > >= <= eq ne lt le gt ge", caseless=True)
arithSign = Word("+-",exact=1)
realNum = Combine( Optional(arithSign) + ( Word( nums ) + "." + Optional( Word(nums) ) |
( "." + Word(nums) ) ) +
Optional( E + Optional(arithSign) + Word(nums) ) )
intNum = Combine( Optional(arithSign) + Word( nums ) +
Optional( E + Optional("+") + Word(nums) ) )
columnRval = realNum | intNum | quotedString | columnName # need to add support for alg expressions
whereCondition = Group(
( columnName + binop + columnRval ) |
( columnName + in_ + "(" + delimitedList( columnRval ) + ")" ) |
( columnName + in_ + "(" + selectStmt + ")" ) |
( "(" + whereExpression + ")" )
)
whereExpression << whereCondition + ZeroOrMore( ( and_ | or_ ) + whereExpression )
# define the grammar
selectStmt << ( selectToken +
( '*' | columnNameList ).setResultsName( "columns" ) +
fromToken +
tableNameList.setResultsName( "tables" ) +
Optional( Group( CaselessLiteral("where") + whereExpression ), "" ).setResultsName("where") )
simpleSQL = selectStmt
# define Oracle comment format, and ignore them
oracleSqlComment = "--" + restOfLine
simpleSQL.ignore( oracleSqlComment )
test( "SELECT * from XYZZY, ABC" )
test( "select * from SYS.XYZZY" )
test( "Select A from Sys.dual" )
test( "Select A,B,C from Sys.dual" )
test( "Select A, B, C from Sys.dual" )
test( "Select A, B, C from Sys.dual, Table2 " )
test( "Xelect A, B, C from Sys.dual" )
test( "Select A, B, C frox Sys.dual" )
test( "Select" )
test( "Select &&& frox Sys.dual" )
test( "Select A from Sys.dual where a in ('RED','GREEN','BLUE')" )
test( "Select A from Sys.dual where a in ('RED','GREEN','BLUE') and b in (10,20,30)" )
test( "Select A,b from table1,table2 where table1.id eq table2.id -- test out comparison operators" )
"""
Test output:
>pythonw -u simpleSQL.py
SELECT * from XYZZY, ABC ->
tokens = ['select', '*', 'from', ['XYZZY', 'ABC']]
tokens.columns = *
tokens.tables = ['XYZZY', 'ABC']
select * from SYS.XYZZY ->
tokens = ['select', '*', 'from', ['SYS.XYZZY']]
tokens.columns = *
tokens.tables = ['SYS.XYZZY']
Select A from Sys.dual ->
tokens = ['select', ['A'], 'from', ['SYS.DUAL']]
tokens.columns = ['A']
tokens.tables = ['SYS.DUAL']
Select A,B,C from Sys.dual ->
tokens = ['select', ['A', 'B', 'C'], 'from', ['SYS.DUAL']]
tokens.columns = ['A', 'B', 'C']
tokens.tables = ['SYS.DUAL']
Select A, B, C from Sys.dual ->
tokens = ['select', ['A', 'B', 'C'], 'from', ['SYS.DUAL']]
tokens.columns = ['A', 'B', 'C']
tokens.tables = ['SYS.DUAL']
Select A, B, C from Sys.dual, Table2 ->
tokens = ['select', ['A', 'B', 'C'], 'from', ['SYS.DUAL', 'TABLE2']]
tokens.columns = ['A', 'B', 'C']
tokens.tables = ['SYS.DUAL', 'TABLE2']
Xelect A, B, C from Sys.dual ->
^
Expected 'select'
Expected 'select' (0), (1,1)
Select A, B, C frox Sys.dual ->
^
Expected 'from'
Expected 'from' (15), (1,16)
Select ->
^
Expected '*'
Expected '*' (6), (1,7)
Select &&& frox Sys.dual ->
^
Expected '*'
Expected '*' (7), (1,8)
>Exit code: 0
""" | dbbhattacharya/kitsune | vendor/packages/pyparsing/examples/simpleSQL.py | Python | bsd-3-clause | 4,698 |
#********************************************************************************
#--------------------------------------------------------------------------------
#
# Significance Labs
# Brooklyn, NYC
#
# Author: Alexandra Berke (aberke)
# Written: Summer 2014
#
#
#
# util file
#
#
#--------------------------------------------------------------------------------
#*********************************************************************************
from flask import Response
import json
from bson import ObjectId
from datetime import datetime
import error_codes
from functools import wraps
from flask import request, current_app
def jsonp(func):
"""
Taken from: https://gist.github.com/1094140
Wraps JSONified output for JSONP requests.
"""
@wraps(func)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback:
data = str(func(*args, **kwargs).data)
content = str(callback) + '(' + data + ')'
mimetype = 'application/javascript'
return current_app.response_class(content, mimetype=mimetype)
else:
return func(*args, **kwargs)
return decorated_function
class JSONEncoder(json.JSONEncoder):
    # Custom JSONEncoder because, by default, json cannot handle datetimes or ObjectIds
def default(self, o):
if isinstance(o, datetime):
return str(o)
if isinstance(o, ObjectId):
return str(o)
return json.JSONEncoder.default(self, o)
def decode(self, data):
if not type(data) == dict:
return data
if '_id' in data:
data['_id'] = ObjectId(data['_id'])
if '_cleaner' in data:
data['_cleaner'] = ObjectId(data['_cleaner'])
if '_list' in data:
data['_list'] = ObjectId(data['_list'])
if '_room' in data:
data['_room'] = ObjectId(data['_room'])
return data
def load(self, data):
if not data:
return None
return self.decode(json.loads(data))
JSONencoder = JSONEncoder()
class APIexception(Exception):
code = 0
message = None
original_message = None
def yell(self, message):
print("\n************ ERROR **************\n" + str(message) + "\n************* ERROR *************\n")
def __init__(self, message='', code=0):
if not code in error_codes.map:
self.yell('Invalid error code: ' + str(code))
code = 0
self.code = code
self.original_message = message
self.message = error_codes.map[code]
Exception.__init__(self, message)
yellERROR("Original message: {0}\nMessage: {1}".format(self.original_message, self.message)) # yell the error for logs
def respond500(exception):
"""
    @param {Exception} exception: error to report; if it is not an APIexception
        it is wrapped in one (code 0) so the response format stays consistent
Philosophy:
Return a small set of error strings
- These strings are keywords in the translations spreadsheet that have translations
Use:
When endpoints are hit with bad data or cause accidental exceptions to occur
It catches accidentally raised Exceptions
- In this case, expects code==0
- Returns nicely formatted response to user rather than cryptic mongo/python error
It it called directly when endpoint receives invalid data
- Expects code in error_codes map
"""
if not isinstance(exception, APIexception):
exception = APIexception(message=exception.message)
data = json.dumps({ 'message': exception.message, 'code': exception.code })
response_headers = {'Content-Type': 'application/json'}
return Response(data, 500, response_headers)
def respond200():
return Response(status=200)
def dumpJSON(data):
if not isinstance(data, str):
data = JSONencoder.encode(data)
response_headers = {'Content-Type': 'application/json'}
return Response(data, 200, response_headers)
def yellERROR(msg=None):
print("\n************ ERROR **************\n" + str(msg) + "\n************* ERROR *************\n") | cromulus/check-list | app/lib/util.py | Python | mit | 3,902 |
#!/usr/bin/env python
# encoding: utf-8
"""
Created by Jeff Verkoeyen on 2011-06-07.
Copyright 2011 Jeff Verkoeyen
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# The exposed interfaces for the ios module.
from pbxproj import pbxproj
from relpath import relpath
| jyotiness/ARTSDK | thirdparty/nimbus/scripts/ios/__init__.py | Python | mit | 747 |
#/###################/#
# Import modules
#
#ImportModules
import ShareYourSystem as SYS
#/###################/#
# Build the model
#
#Define
MyTransferer=SYS.TransfererClass(
).stationarize(
_MeanWeightVariable=[
[0.,-10.],
[10.,0.]
],
_RateVariable = [5.,5.],
_InteractionStr = "Spike"
).stabilize(
_ComputeBool=False
).transfer(
_ColorStrsList = ["red","blue"]
).view(
).pyplot(
).show(
)
#print
print('MyTransferer is ')
SYS._print(MyTransferer)
| Ledoux/ShareYourSystem | Pythonlogy/ShareYourSystem/Specials/_Lifers/Transferer/tests/test_13_transfer_lif_ExampleCell.py | Python | mit | 480 |
# Generated by Django 2.2.13 on 2020-07-06 15:28
from __future__ import unicode_literals
from django.db import migrations
import re
PERMISSIONS = ['view', 'add', 'change', 'delete']
MODELS = {'core': ['Annotation',
'Attachment',
'Build',
'Environment',
'Metric',
'Status',
'Suite',
'TestRun'],
'ci': ['TestJob']}
def split_on_upper_and_make_lower(name):
split = re.findall('[A-Z][^A-Z]*', name)
return [w.lower() for w in split]
def create_squad_group_and_add_users(apps, schema_editor):
Group = apps.get_model('auth', 'Group')
Permission = apps.get_model('auth', 'Permission')
ContentType = apps.get_model('contenttypes', 'ContentType')
squad_group, created = Group.objects.get_or_create(name='squad')
for app, model in MODELS.items():
for m in model:
ct = ContentType.objects.get_for_model(apps.get_model(app, m))
for permission in PERMISSIONS:
split_words = split_on_upper_and_make_lower(m)
perm_name = 'Can {}' + (' {}' * len(split_words))
name = perm_name.format(permission, *split_words)
codename = '_'.join([permission, m.lower()])
try:
perm = Permission.objects.get(name=name, codename=codename, content_type=ct)
except Permission.DoesNotExist:
perm = Permission.objects.create(name=name, codename=codename, content_type=ct)
squad_group.permissions.add(perm)
User = apps.get_model('auth', 'User')
for user in User.objects.all():
squad_group.user_set.add(user)
class Migration(migrations.Migration):
dependencies = [
('core', '0130_project_status_baseline_next'),
]
operations = [
migrations.RunPython(
create_squad_group_and_add_users,
reverse_code=migrations.RunPython.noop
)
]
| Linaro/squad | squad/core/migrations/0131_create_squad_auth_group_and_add_users.py | Python | agpl-3.0 | 2,033 |
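# A quick illustration of the naming helper used by the migration above (logic
# duplicated here so the snippet runs standalone, outside the migration module):
import re

def split_on_upper_and_make_lower(name):
    return [w.lower() for w in re.findall('[A-Z][^A-Z]*', name)]

print(split_on_upper_and_make_lower('TestRun'))  # ['test', 'run']
# -> permission name 'Can view test run' with codename 'view_testrun'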
import sys
import os
import inspect
from PyQt5 import QtWidgets, QtCore, QtGui
import plugnplay
from uptime import boottime
from TardisUtil import TardisOptions, TimeSubmitter
class TardisDiff(QtWidgets.QMainWindow):
def __init__(self):
super(TardisDiff, self).__init__()
self.difference = 0
self.clipboard = QtWidgets.QApplication.clipboard()
# Set hot keys
QtWidgets.QShortcut(QtGui.QKeySequence("Ctrl+Shift+C"), self,
self.setClipboard)
QtWidgets.QShortcut(QtGui.QKeySequence("Ctrl+Shift+T"), self,
self.notify_time_submitters)
self.options = TardisOptions()
# Get plugins
plugnplay.plugin_dirs = ['./plugins', ]
plugnplay.load_plugins()
# Get directory path
# From: http://stackoverflow.com/a/22881871/1963958
if getattr(sys, 'frozen', False): # py2exe, PyInstaller, cx_Freeze
script_path = os.path.abspath(sys.executable)
else:
script_path = inspect.getabsfile(TardisDiff)
script_path = os.path.realpath(script_path)
script_path = os.path.dirname(script_path)
# Google for a fancy tardis icon until I've made one
self.setWindowIcon(QtGui.QIcon(
os.path.join(script_path, 'icon', 'tardis-by-camilla-isabell-kasbo.ico')))
self.initUI()
def initUI(self):
# Create and initialize UI elements
self.contentWidget = QtWidgets.QWidget()
self.gridLayout = QtWidgets.QGridLayout(self.contentWidget)
self.formLayout = QtWidgets.QFormLayout()
self.timeEdit1 = QtWidgets.QTimeEdit(self.contentWidget)
self.timeEdit2 = QtWidgets.QTimeEdit(self.contentWidget)
self.timeEditBreakTime = QtWidgets.QTimeEdit(self.contentWidget)
self.timeEditBreakTime.setDisplayFormat("h:mm")
self.timeEditBreakTime.setCurrentSection(
QtWidgets.QDateTimeEdit.MinuteSection)
self.timeEditBreakTime.setTime(QtCore.QTime(0, 30))
self.label_timeDiffOut = QtWidgets.QLabel(self.contentWidget)
self.button_time1_now = QtWidgets.QPushButton(
"Now", self.contentWidget)
self.button_time2_now = QtWidgets.QPushButton(
"Now", self.contentWidget)
self.label_timeDiffOut.setText("")
self.timeEdit1.setTime(self.getStartTime())
self.timeEdit2.setTime(QtCore.QTime.currentTime())
# Add UI elements
row1 = QtWidgets.QHBoxLayout()
row1.addWidget(self.timeEdit1)
row1.addWidget(self.button_time1_now)
row2 = QtWidgets.QHBoxLayout()
row2.addWidget(self.timeEdit2)
row2.addWidget(self.button_time2_now)
self.formLayout.addRow("Time 1:", row1)
self.formLayout.addRow("Time 2:", row2)
self.formLayout.addRow("Break Time:", self.timeEditBreakTime)
self.formLayout.addRow("Difference:", self.label_timeDiffOut)
self.gridLayout.addLayout(self.formLayout, 0, 0, 1, 1)
self.setCentralWidget(self.contentWidget)
self.statusBar()
# connect slots
self.timeEdit1.timeChanged.connect(self.inputChanged)
self.timeEdit2.timeChanged.connect(self.inputChanged)
self.timeEditBreakTime.timeChanged.connect(self.inputChanged)
self.button_time1_now.pressed.connect(self.reset_time1)
self.button_time2_now.pressed.connect(self.reset_time2)
self.setWindowTitle('TardisDiff')
self.inputChanged()
self.show()
def inputChanged(self):
"""
Checks both time inputs and the break time
input to determine the difference.
Then calls the method to update the ui.
"""
time1 = self.timeEdit1.time()
time2 = self.timeEdit2.time()
breakTime = self.timeEditBreakTime.time().secsTo(QtCore.QTime(0, 0))
self.difference = (time1.secsTo(time2) + breakTime) / 3600
self.difference = round(self.difference, 2)
self.label_timeDiffOut.setText(str(self.difference))
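    # Worked example of the computation above (illustration only): time1 = 09:00,
    # time2 = 17:30 and a 0:30 break give time1.secsTo(time2) = 30600 s and
    # breakTime = QTime(0, 30).secsTo(QTime(0, 0)) = -1800 s, so the difference
    # is (30600 - 1800) / 3600 = 8.0 hours.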
def reset_time1(self):
self.timeEdit1.setTime(QtCore.QTime.currentTime())
def reset_time2(self):
self.timeEdit2.setTime(QtCore.QTime.currentTime())
def setClipboard(self):
"""Sets the current diff text to clipboard"""
self.clipboard.setText(str(self.difference))
self.statusBar().showMessage("Copied to clipboard.")
def getStartTime(self):
return TardisDiff.getBootTimeAsQTime()\
if self.options.isStartTimeAuto()\
else QtCore.QTime.fromString(self.options.getStartTime())
def notify_time_submitters(self):
TimeSubmitter.submit_time(self.difference)
@staticmethod
def getBootTimeAsQTime():
return QtCore.QDateTime(boottime()).time()
def main():
app = QtWidgets.QApplication(sys.argv)
ed = TardisDiff()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
| git-commit/TardisDiff | TardisDiff.py | Python | isc | 4,981 |
# F3AT - Flumotion Asynchronous Autonomous Agent Toolkit
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# See "LICENSE.GPL" in the source distribution for more information.
# Headers in this file shall remain intact.
import json
from zope.interface import implements
from feat.common import defer, serialization, error, log
from feat.common.serialization import json as feat_json
from feat.common.container import AsyncDict
from feat.web import document
from feat.models.interface import IModel, IReference
from feat.models.interface import IErrorPayload
from feat.models.interface import IActionPayload, IMetadata, IAttribute
from feat.models.interface import IValueCollection, IValueOptions, IValueRange
from feat.models.interface import IEncodingInfo, ValueTypes
from feat.models.interface import Unauthorized
MIME_TYPE = "application/json"
class ActionPayload(dict):
implements(IActionPayload)
def render_metadata(obj):
result = []
if IMetadata.providedBy(obj):
metadata = IMetadata(obj)
for metaitem in metadata.iter_meta():
m = {"name": metaitem.name,
"value": metaitem.value}
if metaitem.scheme is not None:
m["scheme"] = metaitem.scheme
result.append(m)
return result
def render_model_items(model, context):
return model.fetch_items().addCallback(render_items, context)
def render_items(items, context):
result = AsyncDict()
for item in items:
result.add(item.name, render_item(item, context))
return result.wait()
def render_model_actions(model, context):
return model.fetch_actions().addCallback(render_actions, context)
def render_actions(actions, context):
result = AsyncDict()
for action in actions:
result.add(action.name, render_action(action, context))
return result.wait()
def render_item(item, context):
result = AsyncDict()
result.add_if_not_none("label", item.label)
result.add_if_not_none("desc", item.desc)
result.add_if_true("metadata", render_metadata(item))
result.add_result("href", item.reference, "resolve", context)
args = (context, result)
return item.fetch().addCallbacks(render_attribute, filter_errors,
callbackArgs=args, errbackArgs=args)
def render_attribute(model, context, result=None):
if not IAttribute.providedBy(model):
return result and result.wait()
result = result or AsyncDict()
subcontext = context.descend(model)
attr = IAttribute(model)
result.add("info", render_value_info(attr.value_info))
result.add_if_true("readable", attr.is_readable)
result.add_if_true("writable", attr.is_writable)
result.add_if_true("deletable", attr.is_deletable)
if attr.is_readable:
if attr.value_info.value_type is not ValueTypes.binary:
d = attr.fetch_value()
d.addCallback(render_value, subcontext)
result.add("value", d)
return result.wait()
def filter_errors(failure, context, result):
failure.trap(Unauthorized)
return result and result.wait()
def render_action(action, context):
result = AsyncDict()
result.add_if_not_none("label", action.label)
result.add_if_not_none("desc", action.desc)
result.add_if_true("metadata", render_metadata(action))
result.add("method", context.get_action_method(action).name)
result.add_if_true("idempotent", bool(action.is_idempotent))
result.add("category", action.category.name)
result.add_result("href", action.reference, "resolve", context)
if action.result_info is not None:
result.add("result", render_value_info(action.result_info))
if action.parameters:
result.add("params", render_params(action.parameters))
return result.wait()
def render_value_info(value):
result = AsyncDict()
result.add("type", value.value_type.name)
if value.use_default:
result.add("default", value.default)
result.add_if_not_none("label", value.label)
result.add_if_not_none("desc", value.desc)
result.add_if_true("metadata", render_metadata(value))
if IEncodingInfo.providedBy(value):
encinfo = IEncodingInfo(value)
result.add_if_not_none("mimetype", encinfo.mime_type)
result.add_if_not_none("encoding", encinfo.encoding)
if IValueCollection.providedBy(value):
coll = IValueCollection(value)
allowed = [render_value_info(v) for v in coll.allowed_types]
result.add("allowed", defer.join(*allowed))
result.add("ordered", coll.is_ordered)
result.add_if_not_none("min_size", coll.min_size)
result.add_if_not_none("max_size", coll.max_size)
if IValueRange.providedBy(value):
vrange = IValueRange(value)
result.add("minimum", vrange.minimum)
result.add("maximum", vrange.maximum)
result.add_if_not_none("increment", vrange.increment)
if IValueOptions.providedBy(value):
options = IValueOptions(value)
result.add("restricted", options.is_restricted)
result.add("options", [{"label": o.label, "value": o.value}
for o in options.iter_options()])
return result.wait()
def render_params(params):
result = AsyncDict()
for param in params:
result.add(param.name, render_param(param))
return result.wait()
def render_param(param):
result = AsyncDict()
result.add("required", param.is_required)
result.add("info", render_value_info(param.value_info))
result.add_if_not_none("label", param.label)
result.add_if_not_none("desc", param.desc)
return result.wait()
def render_value(value, context):
if IReference.providedBy(value):
return value.resolve(context)
return value
def render_verbose(model, context):
result = AsyncDict()
result.add("identity", model.identity)
result.add_if_not_none("name", model.name)
result.add_if_not_none("label", model.label)
result.add_if_not_none("desc", model.desc)
result.add_result("href", model.reference, "resolve", context)
result.add_if_true("metadata", render_metadata(model))
result.add_if_true("items", render_model_items(model, context))
result.add_if_true("actions", render_model_actions(model, context))
return render_attribute(model, context, result)
def render_compact_model(model, context):
if IAttribute.providedBy(model):
attr = IAttribute(model)
if attr.is_readable:
d = attr.fetch_value()
d.addCallback(render_value, context)
return d
return defer.succeed(None)
if render_as_list(model):
return render_model_as_list(model, context)
result = AsyncDict()
if model.reference:
result.add_result("href", model.reference, "resolve", context)
d = model.fetch_items()
d.addCallback(render_compact_items, context, result)
return d
def render_compact_items(items, context, result):
for item in items:
if render_inline(item):
d = item.fetch()
d.addCallback(render_inline_model, context)
result.add(item.name, d)
elif iattribute_meta(item) and not prevent_inline(item):
d = item.fetch()
d.addCallback(render_compact_attribute, item, context)
result.add(item.name, d)
elif item.reference is not None:
result.add(item.name, item.reference.resolve(context))
return result.wait()
def _parse_meta(meta_items):
return [i.strip() for i in meta_items.value.split(",")]
def get_parsed_meta(meta):
if not IMetadata.providedBy(meta):
return []
parsed = [_parse_meta(i) for i in meta.get_meta('json')]
return parsed
def iattribute_meta(meta):
parsed = get_parsed_meta(meta)
return ['attribute'] in parsed
def render_inline(meta):
parsed = get_parsed_meta(meta)
return ['render-inline'] in parsed
def render_as_list(meta):
parsed = get_parsed_meta(meta)
return ['render-as-list'] in parsed
def prevent_inline(meta):
parsed = get_parsed_meta(meta)
return ['prevent-inline'] in parsed
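# Example of the meta convention handled above (illustration only): an item
# annotated with meta ('json', 'render-inline') parses to [['render-inline']],
# so render_inline() returns True and render_compact_items() serializes the
# item in place through NestedJson instead of emitting only its href.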
def render_compact_attribute(submodel, item, context):
attr = IAttribute(submodel)
if attr.value_info.value_type is ValueTypes.binary:
if item.reference is not None:
return item.reference.resolve(context)
elif attr.is_readable:
d = attr.fetch_value()
d.addCallback(render_value, context)
return d
def filter_model_errors(failure, item, context):
failure.trap(Unauthorized)
if item.reference is not None:
return item.reference.resolve(context)
return failure
def render_json(data, doc):
if doc.encoding == 'nested-json':
doc.write(data)
else:
enc = CustomJSONEncoder(encoding=doc.encoding)
doc.write(enc.encode(data))
def write_model(doc, obj, *args, **kwargs):
context = kwargs["context"]
verbose = "format" in kwargs and "verbose" in kwargs["format"]
if verbose:
d = render_verbose(obj, context)
else:
d = render_compact_model(obj, context)
return d.addCallback(render_json, doc)
class NestedJson(document.BaseDocument):
'''
This is an implementation used to represent nested documents which
    are rendered inline. It is used in CustomJSONEncoder to inject
    preserialized parts of the resulting json into the structure.
'''
implements(document.IWritableDocument)
def __init__(self):
document.BaseDocument.__init__(self, MIME_TYPE, 'nested-json')
self._data = None
def get_data(self):
return self._data
### IWriter ###
def write(self, data):
self._data = data
def writelines(self, sequence):
raise NotImplementedError("This should not be used for NestedJson")
def render_inline_model(obj, context, *args, **kwargs):
obj = IModel(obj)
doc = NestedJson()
d = document.write(doc, obj, context=context)
d.addCallback(defer.override_result, doc)
return d
def render_model_as_list(obj, context):
def got_items(items):
defers = list()
for item in items:
d = item.fetch()
d.addCallbacks(render_inline_model, filter_model_errors,
callbackArgs=(context, ),
errbackArgs=(item, context))
defers.append(d)
return defer.DeferredList(defers, consumeErrors=True)
d = obj.fetch_items()
d.addCallback(got_items)
d.addCallback(unpack_deferred_list_result)
d.addCallback(list)
return d
def unpack_deferred_list_result(results):
for successful, result in results:
if not successful:
error.handle_failure(None, result, "Failed rendering inline model")
continue
yield result
def write_reference(doc, obj, *args, **kwargs):
context = kwargs["context"]
result = obj.resolve(context)
render_json({u"type": u"reference", u"href": result}, doc)
def write_error(doc, obj, *args, **kwargs):
result = {}
result[u"type"] = u"error"
result[u"error"] = unicode(obj.error_type.name)
if obj.error_code is not None:
result[u"code"] = int(obj.error_code)
if obj.message is not None:
result[u"message"] = obj.message
if obj.subjects is not None:
result[u"subjects"] = list(obj.subjects)
if obj.reasons:
result[u"reasons"] = dict([k, str(v)]
for k, v in obj.reasons.iteritems())
if obj.debug is not None:
result[u"debug"] = obj.debug
if obj.stamp:
result[u"stamp"] = obj.stamp
log.debug('application/json',
'Wrote error response with debug stamp: %s', obj.stamp)
log.debug('application/json', 'Error: %s', result[u'error'])
if obj.message:
log.debug('application/json', 'Message: %s', obj.message)
render_json(result, doc)
def write_anything(doc, obj, *args, **kwargs):
render_json(obj, doc)
def read_action(doc, *args, **kwargs):
data = doc.read()
if not data:
return ActionPayload()
try:
params = json.loads(data)
except ValueError, e:
raise document.DocumentFormatError("Invalid JSON document: %s"
% (e, ))
if not isinstance(params, dict):
return ActionPayload([(u"value", params)])
return ActionPayload(params)
document.register_writer(write_model, MIME_TYPE, IModel)
document.register_writer(write_error, MIME_TYPE, IErrorPayload)
document.register_writer(write_reference, MIME_TYPE, IReference)
# document.register_writer(write_serializable, MIME_TYPE,
# serialization.ISerializable)
document.register_writer(write_anything, MIME_TYPE, None)
document.register_reader(read_action, MIME_TYPE, IActionPayload)
### private ###
class CustomJSONEncoder(json.JSONEncoder):
def __init__(self, context=None, encoding=None):
kwargs = {"indent": 2}
if encoding is not None:
kwargs["encoding"] = encoding
json.JSONEncoder.__init__(self, **kwargs)
self._serializer = feat_json.PreSerializer()
def default(self, obj):
if serialization.ISerializable.providedBy(obj):
return self._serializer.convert(obj)
if serialization.ISnapshotable.providedBy(obj):
return self._serializer.freeze(obj)
if isinstance(obj, NestedJson):
# models marked with render-inline are rendered into a separate
# IWritableDocument instance, which is here injected into its
# placeholder in the resulting document
return obj.get_data()
return json.JSONEncoder.default(self, obj)
| f3at/feat | src/feat/models/applicationjson.py | Python | gpl-2.0 | 14,537 |
from mdt import CompartmentTemplate, FreeParameterTemplate
from mdt.model_building.parameter_functions.transformations import ScaleTransform
__author__ = 'Robbert Harms'
__date__ = "2015-06-21"
__maintainer__ = "Robbert Harms"
__email__ = "robbert@xkls.nl"
class TimeDependentZeppelin(CompartmentTemplate):
"""Implements a Zeppelin (cylindrical symmetric Tensor) with time dependence in the perpendicular diffusivity.
The perpendicular diffusivity is calculated as:
.. math::
D_{h, \perp} = D_{h,\infty} + A \frac{\ln(\Delta/\delta) + 3/2}{\Delta - \delta/3}
For a detailed description please see equation 11 in [1].
References:
[1] De Santis, S., Jones D., Roebroeck A., 2016. Including diffusion time dependence in the extra-axonal space
improves in vivo estimates of axonal diameter and density in human white matter, NeuroImage 2016.
"""
parameters = ('g', 'b', 'd', 'd_bulk', 'theta', 'phi', 'time_dependent_characteristic_coefficient(A)',
'Delta', 'delta')
dependencies = ('Zeppelin',)
cl_code = '''
double dperp0 = d_bulk + A * (log(Delta/delta) + 3/2.0)/(Delta - delta/3.0);
return Zeppelin(g, b, d, dperp0, theta, phi);
'''
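# Hedged illustration (not part of the MDT template API): a plain-Python mirror
# of the CL expression in TimeDependentZeppelin.cl_code, useful for
# sanity-checking the time dependent perpendicular diffusivity. The sample
# argument values are arbitrary; d_bulk is assumed to be in m^2/s, A in m^2 and
# the diffusion timings Delta/delta in seconds.
def _example_time_dependent_dperp(d_bulk=2.0e-9, A=1e-6, Delta=30e-3, delta=15e-3):
    """Return d_bulk + A * (ln(Delta/delta) + 3/2) / (Delta - delta/3)."""
    from math import log
    return d_bulk + A * (log(Delta / delta) + 3 / 2.0) / (Delta - delta / 3.0)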
class time_dependent_characteristic_coefficient(FreeParameterTemplate):
"""The time dependent characteristic as used in the TimeDependentZeppelin model. Values are in m^2."""
init_value = 1e-6
lower_bound = 1e-7
upper_bound = 1e-5
parameter_transform = ScaleTransform(1e7)
sampling_proposal_std = 1e-7
| cbclab/MDT | mdt/data/components/standard/compartment_models/TimeDependentZeppelin.py | Python | lgpl-3.0 | 1,598 |
"""
Functions provided to user for saving / loading / combining times data objects.
"""
from __future__ import absolute_import
from timeit import default_timer as timer
try:
import cPickle as pickle
except:
import pickle
import copy
# import mmap
# import os
from gtimer.private import focus as f
from gtimer.private import collapse
from gtimer.local.times import Times
from gtimer.local import merge
__all__ = ['get_times', 'attach_subdivision', 'attach_par_subdivision',
'save_pkl', 'load_pkl']
# 'open_mmap', 'close_mmap', 'save_mmap', 'load_mmap']
def get_times():
"""
Produce a deepcopy of the current timing data (no risk of interference
    with active timing or other operations).
Returns:
Times: gtimer timing data structure object.
"""
if f.root.stopped:
return copy.deepcopy(f.root.times)
else:
t = timer()
times = collapse.collapse_times()
f.root.self_cut += timer() - t
return times
def attach_par_subdivision(par_name, par_times):
"""
Manual assignment of a collection of (stopped) Times objects as a parallel
subdivision of a running timer.
Notes:
An example sequence of proper usage:
1. Stamp in master process.
2. Run timed sub-processes.
3. Get timing data from sub-processes into master.
4. Attach timing data (i.e. list of Times objects) in master using this method.
5. Stamp in master process.
To stamp in the master between steps 1 and 5, it is recommended to
subdivide() between steps 1 and 2, and end that subdivision before
attaching, or else the master stamp will not reflect the sub-process
time.
Args:
par_name (any): Identifier for the collection, passed through str()
par_times (list or tuple): Collection of Times data objects.
Raises:
TypeError: If par_times not a list or tuple of Times data objects.
"""
t = timer()
if not isinstance(par_times, (list, tuple)):
raise TypeError("Expected list or tuple for param 'par_times'.")
for times in par_times:
if not isinstance(times, Times):
raise TypeError("Expected each element of param 'par_times' to be Times object.")
assert times.total > 0., "An attached par subdivision has total time 0, appears empty."
par_name = str(par_name)
sub_with_max_tot = max(par_times, key=lambda x: x.total)
f.r.self_agg += sub_with_max_tot.self_agg
if par_name not in f.t.par_subdvsn_awaiting:
f.t.par_subdvsn_awaiting[par_name] = []
for times in par_times:
times_copy = copy.deepcopy(times)
times_copy.parent = f.r
times_copy.par_in_parent = par_name
f.t.par_subdvsn_awaiting[par_name].append(times_copy)
else:
for new_sub in par_times:
is_prev_sub = False
for old_sub in f.t.par_subdvsn_awaiting[par_name]:
if old_sub.name == new_sub.name:
is_prev_sub = True
break
if is_prev_sub:
merge.merge_times(old_sub, new_sub)
else:
new_sub_copy = copy.deepcopy(new_sub)
new_sub_copy.parent = f.r
new_sub_copy.par_in_parent = par_name
f.t.par_subdvsn_awaiting[par_name].append(new_sub_copy)
f.t.self_cut += timer() - t
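# Hedged usage sketch for attach_par_subdivision(), following the numbered
# sequence in the docstring. It assumes the top-level ``gtimer`` package
# re-exports these helpers (e.g. ``gtimer.stamp``, ``gtimer.get_times``); the
# worker function and pool object are hypothetical:
#
#     import gtimer as gt
#
#     def worker(i):
#         ...                                 # timed work in the sub-process
#         gt.stamp('work_%d' % i)
#         return gt.get_times()               # step 3: ship data to the master
#
#     gt.stamp('setup')                       # step 1: stamp in the master
#     par_times = pool.map(worker, range(4))  # step 2: run timed sub-processes
#     gt.attach_par_subdivision('workers', par_times)   # step 4
#     gt.stamp('parallel_work')               # step 5: stamp in the master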
def attach_subdivision(times):
"""
Manual assignment of a (stopped) times object as a subdivision of running
timer. Use cases are expected to be very limited (mainly provided as a
one-Times variant of attach_par_subdivision).
Notes:
As with any subdivision, the interval in the receiving timer is assumed to
totally subsume the time accumulated within the attached object--the total
in the receiver is not adjusted!
Args:
times (Times): Individual Times data object.
Raises:
TypeError: If times not a Times data object.
"""
t = timer()
if not isinstance(times, Times):
raise TypeError("Expected Times object for param 'times'.")
assert times.total > 0., "Attached subdivision has total time 0, appears empty."
name = times.name
f.r.self_agg += times.self_agg
if name not in f.t.subdvsn_awaiting:
times_copy = copy.deepcopy(times)
times_copy.parent = f.r
f.t.subdvsn_awaiting[name] = times_copy
else:
merge.merge_times(f.t.subdvsn_awaiting[name], times)
f.t.self_cut += timer() - t
def save_pkl(filename=None, times=None):
"""
Serialize and / or save a Times data object using pickle (cPickle).
Args:
filename (None, optional): Filename to dump to. If not provided,
returns serialized object.
        times (None, optional): object to dump. If none provided, uses
current root.
Returns:
pkl: Pickled Times data object, only if no filename provided.
Raises:
TypeError: If 'times' is not a Times object or a list of tuple of
them.
"""
if times is None:
if not f.root.stopped:
times = collapse.collapse_times()
else:
times = f.root.times
else:
if isinstance(times, (list, tuple)):
for t in times:
if not isinstance(t, Times):
raise TypeError("Expected single Times instance or list/tuple of Times instances for param 'times'.")
elif not isinstance(times, Times):
raise TypeError("Expected single Times instance or list/tuple of Times instances for param 'times'.")
if filename is not None:
with open(str(filename), 'wb') as file:
pickle.dump(times, file)
else:
return pickle.dumps(times)
def load_pkl(filenames):
"""
Unpickle file contents.
Args:
filenames (str): Can be one or a list or tuple of filenames to retrieve.
Returns:
Times: A single object, or from a collection of filenames, a list of Times objects.
Raises:
TypeError: If any loaded object is not a Times object.
"""
if not isinstance(filenames, (list, tuple)):
filenames = [filenames]
times = []
for name in filenames:
name = str(name)
with open(name, 'rb') as file:
loaded_obj = pickle.load(file)
if not isinstance(loaded_obj, Times):
raise TypeError("At least one loaded object is not a Times data object.")
times.append(loaded_obj)
return times if len(times) > 1 else times[0]
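# Hedged round-trip sketch for save_pkl()/load_pkl() above (file name is
# arbitrary):
#
#     times = get_times()               # snapshot of the current timing data
#     save_pkl('run1.pkl', times)       # write to disk
#     restored = load_pkl('run1.pkl')   # one filename -> one Times object
#     assert restored.name == times.name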
#
# These are still under construction...not tested and probably not functional:
#
# def open_mmap(filenames, init_size=10000, write=True):
# """
# EXPERIMENTAL: UNTESTED OR NOT FUNCTIONING.
# Args:
# filenames (TYPE): Description
# init_size (int, optional): Description
# write (bool, optional): Description
# """
# if not isinstance(filenames, (list, tuple)):
# filenames = [filenames]
# files = list()
# mmaps = list()
# for name in filenames:
# name = str(name)
# if not os.path.isfile(name):
# with open(name, 'w') as f:
# f.write(init_size * b'\0')
# if write:
# access = mmap.ACCESS_COPY
# else:
# access = mmap.ACCESS_READ
# file = open(name, 'r+')
# mm = mmap.mmap(f.fileno(), 0, access=access)
# files.append(file)
# mmaps.append(mm)
# if len(files) > 1:
# return files, mmaps
# else:
# return file, mm
# def close_mmap(mmaps, files):
# """
# EXPERIMENTAL: UNTESTED OR NOT FUNCTIONING.
# Args:
# mmaps (TYPE): Description
# files (TYPE): Description
# """
# mmaps = list(mmaps)
# files = list(files)
# for mm in mmaps:
# mm.close()
# for file in files:
# file.close()
# def save_mmap(mm, file, times=None):
# """
# EXPERIMENTAL: UNTESTED OR NOT FUNCTIONING.
# Args:
# mm (TYPE): Description
# file (TYPE): Description
# times (None, optional): Description
# """
# if times is not None:
# assert isinstance(times, Times), "Input 'times' must be None or Times object."
# times_pkl = save_pkl(times)
# filesize = mm.size()
# data_len = len(times_pkl)
# if data_len > filesize:
# mm.close()
# file.seek(0, 2)
# file.write((data_len - filesize) * b'\0')
# mm = mmap.mmap(file.fileno(), data_len, access=mmap.ACCESS_COPY)
# mm.seek(0)
# mm.write(times_pkl)
# return mm, data_len
# def load_mmap(mmaps, files, write=False):
# """
# EXPERIMENTAL: UNTESTED OR NOT FUNCTIONING.
# Args:
# mmaps (TYPE): Description
# files (TYPE): Description
# write (bool, optional): Description
# """
# mmaps = list(mmaps)
# files = list(files)
# times = list()
# if write:
# access = mmap.ACCESS_COPY
# else:
# access = mmap.ACCESS_READ
# mmaps_new = list()
# for file, mm in zip(files, mmaps):
# size = os.path.getsize(f)
# if size > mm.size():
# mm.close()
# mm_new = mmap.mmap(file.fileno(), 0, access=access)
# mmaps_new.append(mm_new)
# mm_new.seek(0)
# times.append(pickle.loads(mm_new.read(size)))
# else:
# mmaps_new.append(mm)
# mm.seek(0)
# times.append(pickle.loads(mm.read(size)))
# if len(times) > 1:
# return times, mmaps_new
# else:
# return times[0], mmaps_new[0]
| astooke/gtimer | gtimer/public/io.py | Python | mit | 9,738 |
# coding=utf-8
# Copyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tensorflow_datasets.image_classification import binary_alpha_digits
import tensorflow_datasets.testing as tfds_test
class BinaryAlphaDigitsTest(tfds_test.DatasetBuilderTestCase):
DATASET_CLASS = binary_alpha_digits.BinaryAlphaDigits
SPLITS = {
"train": 2,
}
DL_EXTRACT_RESULT = {"train": "binaryalphadigs.mat"}
if __name__ == "__main__":
tfds_test.test_main()
| tensorflow/datasets | tensorflow_datasets/image_classification/binary_alpha_digits_test.py | Python | apache-2.0 | 998 |
import os
from setuptools import setup, find_packages
requirements_file = open('%s/requirements.txt' % os.path.dirname(os.path.realpath(__file__)), 'r')
install_requires = [line.rstrip() for line in requirements_file]
base_dir = os.path.dirname(os.path.abspath(__file__))
setup(
name="pyhttp",
version="1.0.0",
description="An object-oriented layer for the HTTP specification. It provides an abstraction for requests, responses, uploaded files, cookies, etc",
long_description="\n\n".join([
open(os.path.join(base_dir, "README.rst"), "r").read(),
]),
url="https://github.com/felixcarmona/pyhttp",
author="Felix Carmona",
author_email="mail@felixcarmona.com",
packages=find_packages(exclude=('pyhttp.tests', 'pyhttp.tests.*')),
zip_safe=False,
install_requires=install_requires,
test_suite="pyhttp.tests.get_tests",
)
| felixcarmona/pyhttp | setup.py | Python | mit | 875 |
# -*- encoding: utf8 -*-
#
# The Qubes OS Project, http://www.qubes-os.org
#
# Copyright (C) 2017 Marek Marczykowski-Górecki
# <marmarek@invisiblethingslab.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see <https://www.gnu.org/licenses/>.
''' Qubes block devices extensions '''
import re
import string
import lxml.etree
import qubes.devices
import qubes.ext
name_re = re.compile(r"^[a-z0-9-]{1,12}$")
device_re = re.compile(r"^[a-z0-9/-]{1,64}$")
# FIXME: any better idea of desc_re?
desc_re = re.compile(r"^.{1,255}$")
mode_re = re.compile(r"^[rw]$")
# all frontends, prefer xvdi
# TODO: get this from libvirt driver?
AVAILABLE_FRONTENDS = ['xvd'+c for c in
string.ascii_lowercase[8:]+string.ascii_lowercase[:8]]
SYSTEM_DISKS = ('xvda', 'xvdb', 'xvdc', 'xvdd')
class BlockDevice(qubes.devices.DeviceInfo):
def __init__(self, backend_domain, ident):
super(BlockDevice, self).__init__(backend_domain=backend_domain,
ident=ident)
self._description = None
self._mode = None
self._size = None
@property
def description(self):
'''Human readable device description'''
if self._description is None:
if not self.backend_domain.is_running():
return self.ident
safe_set = {ord(c) for c in
string.ascii_letters + string.digits + '()+,-.:=_/ '}
untrusted_desc = self.backend_domain.untrusted_qdb.read(
'/qubes-block-devices/{}/desc'.format(self.ident))
if not untrusted_desc:
return ''
desc = ''.join((chr(c) if c in safe_set else '_')
for c in untrusted_desc)
self._description = desc
return self._description
@property
def mode(self):
'''Device mode, either 'w' for read-write, or 'r' for read-only'''
if self._mode is None:
if not self.backend_domain.is_running():
return 'w'
untrusted_mode = self.backend_domain.untrusted_qdb.read(
'/qubes-block-devices/{}/mode'.format(self.ident))
if untrusted_mode is None:
self._mode = 'w'
elif untrusted_mode not in (b'w', b'r'):
self.backend_domain.log.warning(
'Device {} has invalid mode'.format(self.ident))
self._mode = 'w'
else:
self._mode = untrusted_mode.decode()
return self._mode
@property
def size(self):
'''Device size in bytes'''
if self._size is None:
if not self.backend_domain.is_running():
return None
untrusted_size = self.backend_domain.untrusted_qdb.read(
'/qubes-block-devices/{}/size'.format(self.ident))
if untrusted_size is None:
self._size = 0
elif not untrusted_size.isdigit():
self.backend_domain.log.warning(
'Device {} has invalid size'.format(self.ident))
self._size = 0
else:
self._size = int(untrusted_size)
return self._size
@property
def device_node(self):
'''Device node in backend domain'''
return '/dev/' + self.ident.replace('_', '/')
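# Example of the ident <-> device-node convention used by BlockDevice above
# (illustration only): the ident 'sdb1' resolves to '/dev/sdb1', while
# 'mapper_dmroot' resolves to '/dev/mapper/dmroot'; a '/' in the backend's
# device path is encoded as '_' in the ident.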
class BlockDeviceExtension(qubes.ext.Extension):
@qubes.ext.handler('domain-init', 'domain-load')
def on_domain_init_load(self, vm, event):
'''Initialize watching for changes'''
# pylint: disable=unused-argument,no-self-use
vm.watch_qdb_path('/qubes-block-devices')
@qubes.ext.handler('domain-qdb-change:/qubes-block-devices')
def on_qdb_change(self, vm, event, path):
'''A change in QubesDB means a change in device list'''
# pylint: disable=unused-argument,no-self-use
vm.fire_event('device-list-change:block')
def device_get(self, vm, ident):
# pylint: disable=no-self-use
'''Read information about device from QubesDB
:param vm: backend VM object
:param ident: device identifier
:returns BlockDevice'''
untrusted_qubes_device_attrs = vm.untrusted_qdb.list(
'/qubes-block-devices/{}/'.format(ident))
if not untrusted_qubes_device_attrs:
return None
return BlockDevice(vm, ident)
@qubes.ext.handler('device-list:block')
def on_device_list_block(self, vm, event):
# pylint: disable=unused-argument,no-self-use
if not vm.is_running():
return
untrusted_qubes_devices = vm.untrusted_qdb.list('/qubes-block-devices/')
untrusted_idents = set(untrusted_path.split('/', 3)[2]
for untrusted_path in untrusted_qubes_devices)
for untrusted_ident in untrusted_idents:
if not name_re.match(untrusted_ident):
msg = ("%s vm's device path name contains unsafe characters. "
"Skipping it.")
vm.log.warning(msg % vm.name)
continue
ident = untrusted_ident
device_info = self.device_get(vm, ident)
if device_info:
yield device_info
@qubes.ext.handler('device-get:block')
def on_device_get_block(self, vm, event, ident):
# pylint: disable=unused-argument,no-self-use
if not vm.is_running():
return
if not vm.app.vmm.offline_mode:
device_info = self.device_get(vm, ident)
if device_info:
yield device_info
@qubes.ext.handler('device-list-attached:block')
def on_device_list_attached(self, vm, event, **kwargs):
# pylint: disable=unused-argument,no-self-use
if not vm.is_running():
return
xml_desc = lxml.etree.fromstring(vm.libvirt_domain.XMLDesc())
for disk in xml_desc.findall('devices/disk'):
if disk.get('type') != 'block':
continue
dev_path_node = disk.find('source')
if dev_path_node is None:
continue
dev_path = dev_path_node.get('dev')
target_node = disk.find('target')
if target_node is not None:
frontend_dev = target_node.get('dev')
if not frontend_dev:
continue
if frontend_dev in SYSTEM_DISKS:
continue
else:
continue
backend_domain_node = disk.find('backenddomain')
if backend_domain_node is not None:
backend_domain = vm.app.domains[backend_domain_node.get('name')]
else:
backend_domain = vm.app.domains[0]
options = {}
read_only_node = disk.find('readonly')
if read_only_node is not None:
options['read-only'] = 'yes'
else:
options['read-only'] = 'no'
options['frontend-dev'] = frontend_dev
if disk.get('device') != 'disk':
options['devtype'] = disk.get('device')
if dev_path.startswith('/dev/'):
ident = dev_path[len('/dev/'):]
else:
ident = dev_path
ident = ident.replace('/', '_')
yield (BlockDevice(backend_domain, ident), options)
def find_unused_frontend(self, vm):
# pylint: disable=no-self-use
'''Find unused block frontend device node for <target dev=.../>
parameter'''
assert vm.is_running()
xml = vm.libvirt_domain.XMLDesc()
parsed_xml = lxml.etree.fromstring(xml)
used = [target.get('dev', None) for target in
parsed_xml.xpath("//domain/devices/disk/target")]
for dev in AVAILABLE_FRONTENDS:
if dev not in used:
return dev
return None
@qubes.ext.handler('device-pre-attach:block')
def on_device_pre_attached_block(self, vm, event, device, options):
# pylint: disable=unused-argument
# validate options
for option, value in options.items():
if option == 'frontend-dev':
if not value.startswith('xvd') and not value.startswith('sd'):
raise qubes.exc.QubesValueError(
'Invalid frontend-dev option value: ' + value)
elif option == 'read-only':
options[option] = (
'yes' if qubes.property.bool(None, None, value) else 'no')
elif option == 'devtype':
if value not in ('disk', 'cdrom'):
raise qubes.exc.QubesValueError(
'devtype option can only have '
'\'disk\' or \'cdrom\' value')
else:
raise qubes.exc.QubesValueError(
'Unsupported option {}'.format(option))
if 'read-only' not in options:
options['read-only'] = 'yes' if device.mode == 'r' else 'no'
if options.get('read-only', 'no') == 'no' and device.mode == 'r':
raise qubes.exc.QubesValueError(
'This device can be attached only read-only')
if not vm.is_running():
return
if not device.backend_domain.is_running():
raise qubes.exc.QubesVMNotRunningError(device.backend_domain,
'Domain {} needs to be running to attach device from '
'it'.format(device.backend_domain.name))
if 'frontend-dev' not in options:
options['frontend-dev'] = self.find_unused_frontend(vm)
vm.libvirt_domain.attachDevice(
vm.app.env.get_template('libvirt/devices/block.xml').render(
device=device, vm=vm, options=options))
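    # Example of an options mapping accepted by the validation above (values
    # are illustrative): {'frontend-dev': 'xvdi', 'read-only': 'yes',
    # 'devtype': 'cdrom'}; a missing 'read-only' key is filled in from the
    # device's own mode.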
@qubes.ext.handler('device-pre-detach:block')
def on_device_pre_detached_block(self, vm, event, device):
# pylint: disable=unused-argument,no-self-use
if not vm.is_running():
return
# need to enumerate attached device to find frontend_dev option (at
# least)
for attached_device, options in self.on_device_list_attached(vm, event):
if attached_device == device:
vm.libvirt_domain.detachDevice(
vm.app.env.get_template('libvirt/devices/block.xml').render(
device=device, vm=vm, options=options))
break
| woju/qubes-core-admin | qubes/ext/block.py | Python | lgpl-2.1 | 11,039 |
#!/usr/bin/env python
import logging
import os
import runpy
import shutil
import sys
import textwrap
import argparse
import config
from path import path
import utils
# Be a good neighbour.
if sys.platform == 'win32':
GLOBAL_CONFIG_FILE = 'tipfy.cfg'
else:
GLOBAL_CONFIG_FILE = '.tipfy.cfg'
MISSING_GAE_SDK_MSG = "%(script)r wasn't found. Add the App Engine SDK to " \
"sys.path or configure sys.path in tipfy.cfg."
class Action(object):
"""Base interface for custom actions."""
#: Action name.
name = None
#: ArgumentParser description.
description = None
#: ArgumentParser epilog.
epilog = None
def __init__(self, manager, name):
self.manager = manager
self.name = name
self.logger = logging.getLogger(name)
def __call__(self, argv):
raise NotImplementedError()
def get_config_section(self):
sections = [self.manager.app, 'tipfy']
return ['%s:%s' % (s, self.name) for s in sections if s]
def error(self, message, status=1):
"""Displays an error message and exits."""
self.logger.error(message)
sys.exit(status)
def run_hooks(self, import_names, args):
"""Executes a list of functions defined as strings. They are imported
dynamically so their modules must be in sys.path. If any of the
functions isn't found, none will be executed.
"""
# Import all first.
hooks = []
for import_name in import_names:
hook = utils.import_string(import_name, True)
if hook is None:
self.error('Could not import %r.' % import_name)
hooks.append(hook)
# Execute all.
for hook in hooks:
hook(self.manager, args)
class CreateAppAction(Action):
"""Creates a directory for a new tipfy app."""
description = 'Creates a directory for a new App Engine app.'
def get_parser(self):
parser = argparse.ArgumentParser(
description=self.description
)
parser.add_argument(
'app_dir',
help='App directory or directories.',
nargs='+'
)
parser.add_argument(
'-t', '--template',
dest='template',
help='App template, copied to the new project directory. '
'If not defined, the default app skeleton is used.'
)
return parser
def __call__(self, argv):
manager = self.manager
section = self.get_config_section()
parser = self.get_parser()
args = parser.parse_args(args=argv)
template_dir = args.template
if not template_dir:
# Try getting the template set in config.
template_dir = manager.config.get(section, 'appengine_stub')
if not template_dir:
# Use default template.
curr_dir = os.path.dirname(os.path.realpath(__file__))
template_dir = os.path.join(curr_dir, 'stubs', 'appengine')
template_dir = os.path.abspath(template_dir)
if not os.path.exists(template_dir):
self.error('Template directory not found: %r.' % template_dir)
for app_dir in args.app_dir:
app_dir = os.path.abspath(app_dir)
self.create_app(app_dir, template_dir)
def create_app(self, app_dir, template_dir):
if os.path.exists(app_dir):
self.error('Project directory already exists: %r.' % app_dir)
shutil.copytree(template_dir, app_dir)
class GaeSdkAction(Action):
"""This is just a wrapper for tools found in the Google App Engine SDK.
It delegates all arguments to the SDK script and no additional arguments
are parsed.
"""
def __call__(self, argv):
sys.argv = [self.name] + argv
try:
runpy.run_module(self.name, run_name='__main__', alter_sys=True)
except ImportError:
self.error(MISSING_GAE_SDK_MSG % dict(script=self.name))
class GaeSdkExtendedAction(Action):
"""Base class for actions that wrap the App Engine SDK scripts to make
them configurable or to add before/after hooks. It accepts all options
from the correspondent SDK scripts, but they can be configured in
tipfy.cfg.
"""
options = []
def get_base_gae_argv(self):
raise NotImplementedError()
def get_getopt_options(self):
for option in self.options:
if isinstance(option, tuple):
long_option, short_option = option
else:
long_option = option
short_option = None
is_bool = not long_option.endswith('=')
long_option = long_option.strip('=')
yield long_option, short_option, is_bool
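    # Example of the modified getopt style handled above (illustration only):
    # ('port=', 'p') yields ('port', 'p', False), i.e. a valued option with a
    # short form, while a bare 'trusted' yields ('trusted', None, True), i.e.
    # a boolean flag.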
def get_parser_from_getopt_options(self):
manager = self.manager
section = self.get_config_section()
usage = '%%(prog)s %(action)s [--config CONFIG] [--app APP] ' \
'[options]' % dict(action=self.name)
parser = argparse.ArgumentParser(
description=self.description,
usage=usage,
formatter_class=argparse.RawDescriptionHelpFormatter,
add_help=False
)
for long_option, short_option, is_bool in self.get_getopt_options():
args = ['--%s' % long_option]
kwargs = {}
if short_option:
args.append('-%s' % short_option)
if is_bool:
kwargs['action'] = 'store_true'
kwargs['default'] = manager.config.getboolean(section,
long_option)
else:
kwargs['default'] = manager.config.get(section, long_option)
parser.add_argument(*args, **kwargs)
# Add app path.
app_path = manager.config.get(section, 'path', '')
parser.add_argument('app', nargs='?', default=app_path)
return parser
def get_gae_argv(self, argv):
manager = self.manager
parser = self.get_parser_from_getopt_options()
args, extras = parser.parse_known_args(args=argv)
if args.help:
parser.print_help()
sys.exit(1)
gae_argv = self.get_base_gae_argv()
for long_option, short_option, is_bool in self.get_getopt_options():
value = getattr(args, long_option)
if value is not None:
if is_bool and value:
value = '--%s' % long_option
elif not is_bool:
value = '--%s=%s' % (long_option, value)
if value:
gae_argv.append(value)
# Add app path.
gae_argv.append(os.path.abspath(args.app))
return gae_argv
class GaeRunserverAction(GaeSdkExtendedAction):
"""
A convenient wrapper for "dev_appserver": starts the Google App Engine
development server using before and after hooks and allowing configurable
defaults.
Default values for each option can be defined in tipfy.cfg in the
"tipfy:runserver" section or for the current app, sufixed by ":runserver".
A special variable "app" is replaced by the value from the "--app"
argument:
[tipfy]
path = /path/to/%(app)s
[tipfy:runserver]
debug = true
datastore_path = /path/to/%(app)s.datastore
[my_app:runserver]
port = 8081
In this case, executing:
tipfy runserver --app=my_app
...will expand to:
dev_appserver --datastore_path=/path/to/my_app.datastore --debug --port=8081 /path/to/my_app
Define in "before" and "after" a list of functions to run before and after
the server executes. These functions are imported so they must be in
sys.path. For example:
[tipfy:runserver]
before =
hooks.before_runserver_1
hooks.before_runserver_2
after =
hooks.after_runserver_1
hooks.after_runserver_2
Then define in the module "hooks.py" some functions to be executed:
def before_runserver_1(manager, args):
print 'before_runserver_1!'
def after_runserver_1(manager, args):
print 'after_runserver_1!'
# ...
Use "tipfy dev_appserver --help" for a description of each option.
"""
description = textwrap.dedent(__doc__)
# All options from dev_appserver in a modified getopt style.
options = [
('address=', 'a'),
'admin_console_server=',
'admin_console_host=',
'allow_skipped_files',
'auth_domain=',
('clear_datastore', 'c'),
'blobstore_path=',
'datastore_path=',
'use_sqlite',
('debug', 'd'),
'debug_imports',
'enable_sendmail',
'disable_static_caching',
'show_mail_body',
('help', 'h'),
'history_path=',
'mysql_host=',
'mysql_port=',
'mysql_user=',
'mysql_password=',
('port=', 'p'),
'require_indexes',
'smtp_host=',
'smtp_password=',
'smtp_port=',
'smtp_user=',
'disable_task_running',
'task_retry_seconds=',
'template_dir=',
'trusted',
]
def get_base_gae_argv(self):
return ['dev_appserver']
def __call__(self, argv):
manager = self.manager
section = self.get_config_section()
before_hooks = manager.config.getlist(section, 'before', [])
after_hooks = manager.config.getlist(section, 'after', [])
# Assemble arguments.
sys.argv = self.get_gae_argv(argv)
# Execute before scripts.
self.run_hooks(before_hooks, argv)
script = 'dev_appserver'
try:
self.logger.info('Executing: %s' % ' '.join(sys.argv))
runpy.run_module(script, run_name='__main__', alter_sys=True)
except ImportError:
self.error(MISSING_GAE_SDK_MSG % dict(script=script))
finally:
# Execute after scripts.
self.run_hooks(after_hooks, argv)
class GaeDeployAction(GaeSdkExtendedAction):
"""
A convenient wrapper for "appcfg update": deploys to Google App Engine
using before and after hooks and allowing configurable defaults.
    Default values for each option can be defined in tipfy.cfg in the
    "tipfy:deploy" section or for the current app, suffixed by ":deploy".
A special variable "app" is replaced by the value from the "--app"
argument:
[tipfy]
path = /path/to/%(app)s
[tipfy:deploy]
verbose = true
[my_app:deploy]
email = user@gmail.com
no_cookies = true
In this case, executing:
tipfy deploy --app=my_app
...will expand to:
appcfg update --verbose --email=user@gmail.com --no_cookies /path/to/my_app
Define in "before" and "after" a list of functions to run before and after
deployment. These functions are imported so they must be in sys.path.
For example:
[tipfy:deploy]
before =
hooks.before_deploy_1
hooks.before_deploy_2
after =
hooks.after_deploy_1
hooks.after_deploy_2
Then define in the module "hooks.py" some functions to be executed:
def before_deploy_1(manager, args):
print 'before_deploy_1!'
def after_deploy_1(manager, args):
print 'after_deploy_1!'
# ...
Use "tipfy appcfg update --help" for a description of each option.
"""
description = textwrap.dedent(__doc__)
# All options from appcfg update in a modified getopt style.
options = [
('help', 'h'),
('quiet', 'q'),
('verbose', 'v'),
'noisy',
('server=', 's'),
'insecure',
('email=', 'e'),
('host=', 'H'),
'no_cookies',
'passin',
('application=', 'A'),
('version=', 'V'),
('max_size=', 'S'),
'no_precompilation',
]
def get_base_gae_argv(self):
return ['appcfg', 'update']
def __call__(self, argv):
manager = self.manager
section = self.get_config_section()
before_hooks = manager.config.getlist(section, 'before', [])
after_hooks = manager.config.getlist(section, 'after', [])
# Assemble arguments.
sys.argv = self.get_gae_argv(argv)
# Execute before scripts.
self.run_hooks(before_hooks, argv)
script = 'appcfg'
try:
self.logger.info('Executing: %s' % ' '.join(sys.argv))
runpy.run_module(script, run_name='__main__', alter_sys=True)
except ImportError:
self.error(MISSING_GAE_SDK_MSG % dict(script=script))
finally:
# Execute after scripts.
self.run_hooks(after_hooks, argv)
class BuildAction(Action):
description = 'Installs packages in the app directory.'
cache_path = 'var/cache/packages'
pin_file = 'var/%(app)s_pinned_versions.txt'
def get_parser(self):
manager = self.manager
# XXX cache option
# XXX symlinks option
section = self.get_config_section()
parser = argparse.ArgumentParser(
description=self.description
)
parser.add_argument(
'--from_pin_file',
help='Install package versions defined in this pin file.',
default=manager.config.get(section, 'from_pin_file')
)
parser.add_argument(
'--pin_file',
help='Name of the file to save pinned versions.',
default=manager.config.get(section, 'pin_file', self.pin_file)
)
parser.add_argument(
'--no_pin_file',
help="Don't create a pin file after installing the packages.",
action='store_true',
default=manager.config.getboolean(section, 'no_pin_file', False)
)
parser.add_argument(
'--cache_path',
help='Directory to store package cache.',
default=manager.config.get(section, 'cache_path', self.cache_path)
)
parser.add_argument(
'--no_cache',
help="Don't use package cache.",
action='store_true',
default=manager.config.getboolean(section, 'no_cache', False)
)
parser.add_argument(
'--no_symlink',
help="Move packages to app directory instead of creating "
"symlinks. Always active on Windows.",
action='store_true',
default=manager.config.getboolean(section, 'no_symlink', False)
)
return parser
def __call__(self, argv):
manager = self.manager
if not manager.app:
self.error('Missing app. Use --app=APP_NAME to define the current '
'app.')
        section = self.get_config_section()
        parser = self.get_parser()
args = parser.parse_args(args=argv)
if args.from_pin_file:
packages_to_install = self.read_pin_file(args.from_pin_file)
else:
packages_to_install = manager.config.getlist(section, 'packages',
[])
if not packages_to_install:
self.error('Missing list of packages to install.')
if sys.platform == 'win32':
args.no_symlink = True
        # XXX actual installation of the packages is not implemented here yet;
        # 'packages' should end up holding the installed "name==version" pins
        # that get written to the pin file below.
        packages = []
if not args.no_pin_file:
pin_file = args.pin_file % dict(app=manager.app)
self.save_pin_file(pin_file, packages)
def save_pin_file(self, pin_file, packages):
# XXX catch errors
f = open(pin_file, 'w+')
f.write('\n'.join(packages))
f.close()
def read_pin_file(self, pin_file):
# XXX catch errors
f = open(pin_file, 'r')
contents = f.read()
f.close()
packages = [line.strip() for line in contents.splitlines()]
return [line for line in packages if line]
def _get_package_finder(self):
# XXX make mirrors configurable
from pip.index import PackageFinder
find_links = []
use_mirrors = False
mirrors = []
index_urls = ['http://pypi.python.org/simple/']
return PackageFinder(find_links=find_links, index_urls=index_urls,
use_mirrors=use_mirrors, mirrors=mirrors)
class InstallAppengineSdkAction(Action):
"""Not implemented yet."""
description = 'Downloads and unzips the App Engine SDK.'
def get_parser(self):
parser = argparse.ArgumentParser(
description=self.description
)
parser.add_argument(
'--version', '-v',
help='SDK version. If not defined, downloads the latest stable '
'one.'
)
return parser
def __call__(self, argv):
manager = self.manager
parser = self.get_parser()
raise NotImplementedError()
class TestAction(Action):
"""Testing stuff."""
def __call__(self, argv):
manager = self.manager
print manager.app
class TipfyManager(object):
description = 'Tipfy Management Utilities.'
epilog = 'Use "%(prog)s action --help" for help on specific actions.'
# XXX Allow users to hook in custom actions.
actions = {
# Wrappers for App Engine SDK tools.
'appcfg': GaeSdkAction,
'bulkload_client': GaeSdkAction,
'bulkloader': GaeSdkAction,
'dev_appserver': GaeSdkAction,
'remote_api_shell': GaeSdkAction,
# For now these are App Engine specific.
'runserver': GaeRunserverAction,
'deploy': GaeDeployAction,
# Extra ones.
#'install_gae_sdk': InstallAppengineSdkAction(),
'create_app': CreateAppAction,
'build': BuildAction,
'test': TestAction,
}
def __init__(self):
pass
def __call__(self, argv):
parser = self.get_parser()
args, extras = parser.parse_known_args(args=argv)
# Load configuration.
self.parse_config(args.config)
# XXX load other actions based on definitions from config.
if args.action not in self.actions:
# Unknown action or --help.
return parser.print_help()
# Load config fom a specific app, if defined, or use default one.
self.app = args.app or self.config.get('tipfy', 'app')
# Fallback to the tipfy section.
self.config_section = ['tipfy']
if self.app:
self.config_section.insert(0, self.app)
# If app is set, an 'app' value can be used in expansions.
if self.app:
self.config.set('DEFAULT', 'app', self.app)
# Prepend configured paths to sys.path, if any.
sys.path[:0] = self.config.getlist(self.config_section, 'sys.path', [])
# Current cwd and app paths.
self.cwd_path = path.getcwd()
if self.app:
default_app_path = self.cwd_path.joinpath(self.app)
self.app_path = path(self.config.get(self.config_section, 'path',
default_app_path)).abspath()
else:
self.app_path = None
if args.help:
# Delegate help to action.
extras.append('--help')
action = self.actions[args.action](self, args.action)
return action(extras)
def get_parser(self):
actions = ', '.join(sorted(self.actions.keys()))
parser = argparse.ArgumentParser(
description=self.description,
epilog=self.epilog,
add_help=False
)
parser.add_argument(
'action',
help='Action to perform. Available actions are: %s.' % actions,
nargs='?'
)
parser.add_argument(
'--config',
default='tipfy.cfg',
help='Configuration file. If not provided, uses tipfy.cfg from '
'the current directory.'
)
parser.add_argument(
'--app',
help='App configuration to use.'
)
parser.add_argument(
'-h', '--help',
help='Show this help message and exit.',
action='store_true'
)
return parser
def parse_config(self, config_file):
"""Load configuration. If files are not specified, try 'tipfy.cfg'
in the current dir.
"""
self.config_files = [
# Global configuration, saved in user dir.
path('~').expanduser().joinpath(GLOBAL_CONFIG_FILE).realpath(),
# Project configuration, saved in current or provided dir.
path(config_file).abspath().realpath(),
]
self.config = config.Config()
self.config_loaded = self.config.read(self.config_files)
def main():
manager = TipfyManager()
manager(sys.argv[1:])
if __name__ == '__main__':
main()
| adilhz/tipfy | manage/__init__.py | Python | bsd-3-clause | 20,992 |
from flask_testing import TestCase
from project import create_app, logger, db
import os
import logging
app = create_app()
class BaseTestCase(TestCase):
def create_app(self):
app.config.from_object('config.TestingConfig')
logger.setLevel(logging.ERROR)
return app
def setUp(self):
db.session.remove()
db.drop_all()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
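# Hedged usage sketch: concrete tests would subclass BaseTestCase so every
# test method runs against a freshly rebuilt schema; the route used below is
# arbitrary and only illustrates the self.client helper that flask_testing
# provides:
#
#     class SmokeTestCase(BaseTestCase):
#         def test_client_responds(self):
#             response = self.client.get('/')     # any URL; 404 is acceptable
#             self.assertIn(response.status_code, (200, 404))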
| Radu-Raicea/Stock-Analyzer | flask/tests/base_database_test.py | Python | bsd-3-clause | 467 |
# -*- coding: utf-8 -*-
#
# This file is part of CERN Document Server.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""CDS API to use Sorenson transcoding server."""
from __future__ import absolute_import, print_function
import os
from werkzeug.utils import cached_property
from . import config
class CDSSorenson(object):
"""CDS Sorenson extension."""
def __init__(self, app=None):
"""Extension initialization."""
if app:
self.app = app
self.init_app(app)
def init_app(self, app):
"""Flask application initialization."""
self.init_config(app)
app.extensions['cds-sorenson'] = self
def init_config(self, app):
"""Initialize configuration."""
for k in dir(config):
if k.startswith('CDS_SORENSON_'):
app.config.setdefault(k, getattr(config, k))
# Set the proxies
if (os.environ.get('APP_CDS_SORENSON_PROXIES_HTTP') and
os.environ.get('APP_CDS_SORENSON_PROXIES_HTTPS')):
app.config['CDS_SORENSON_PROXIES'] = {
'http': os.environ.get('APP_CDS_SORENSON_PROXIES_HTTP'),
'https': os.environ.get('APP_CDS_SORENSON_PROXIES_HTTPS')
}
@cached_property
def aspect_ratio_fractions(self):
"""Map aspect ratios with their computed fractions."""
fractions_with_ar = {}
for ar in self.app.config['CDS_SORENSON_PRESETS']:
sorenson_w, sorenson_h = ar.split(':')
sorenson_ar_fraction = float(sorenson_w) / float(sorenson_h)
fractions_with_ar.setdefault(sorenson_ar_fraction, ar)
return fractions_with_ar
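# Illustration of the mapping built above, assuming CDS_SORENSON_PRESETS is
# keyed by aspect-ratio strings (an assumption based on the split on ':'):
# presets keyed by '16:9' and '4:3' would yield
# {1.7777777777777777: '16:9', 1.3333333333333333: '4:3'}, letting a video's
# width/height fraction be matched back to its configured ratio.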
| CERNDocumentServer/cds-sorenson | cds_sorenson/ext.py | Python | gpl-2.0 | 2,551 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*
# Supervisor process - reads config.yaml and checks services
# installation on Ubuntu
# sudo apt-get install python3-yaml
# sudo pip3 install paho-mqtt
#
# or (if running python 2.7):
# sudo apt-get install python-yaml
# sudo pip install paho-mqtt
# + backport of ipaddress to python 2.7:
# sudo apt-get install python-ipaddress
#
# If your user has trouble accessing paho-mqtt, execute the following commands:
# sudo find /usr/local/lib/python3.4/dist-packages/ -type d -exec chmod 755 {} ';'
# sudo find /usr/local/lib/python3.4/dist-packages/ -type f -exec chmod 644 {} ';'
import logging
import services
import sched
import time
import sys
import yaml
import paho.mqtt.client as paho
logging.basicConfig(level=logging.INFO)
# read yaml configuration
def read_configuration(file='config.yaml'):
logging.info("Opening config file " + file)
with open(file, 'r') as myfile:
data = myfile.read()
    return yaml.safe_load(data)
try:
config = read_configuration()
except Exception as e:
print("Could not read configuration: " + str(e))
sys.exit(1)
# open mqtt client
client = paho.Client()
# set user/password
if config['mqtt_broker']['use_username_password']:
client.username_pw_set(config['mqtt_broker']['username'], config['mqtt_broker']['password'])
# set tls
if config['mqtt_broker']['use_ca']:
client.tls_set(config['mqtt_broker']['ca_cert'])
client.tls_insecure_set(config['mqtt_broker']['ca_insecure'])
# connect
try:
client.connect(config['mqtt_broker']['hostname'], config['mqtt_broker']['port'], config['mqtt_broker']['keepalive'])
logging.info("Connected to broker " + config['mqtt_broker']['hostname'] + ":" + str(config['mqtt_broker']['port']))
except Exception as e:
print("Could not connect with mqtt broker: " + str(e))
sys.exit(1)
# add client to config
config['settings']['mqtt_client'] = client
# define check function to run on scheduler
def check_service(service, sc):
logging.info("Running " + service.name)
service.execute()
# re-enter service
sc.enter(service.check_frequency, 1, check_service, (service, sc,))
# initialize scheduler
s = sched.scheduler(time.time, time.sleep)
# get services and add them to scheduler
for key, service_config in config['services'].items():
service = services.service_factory(service_config['type'], key, service_config, config['settings'])
# add to scheduler
s.enter(0, 1, check_service, (service, s,))
# run scheduler
s.run()
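# Hedged sketch of the config.yaml layout this script expects, reconstructed
# from the keys read above; the service entry and any options besides 'type'
# depend on the plugins loaded from services.py and are hypothetical:
#
#   mqtt_broker:
#     hostname: localhost
#     port: 1883
#     keepalive: 60
#     use_username_password: false
#     username: ''
#     password: ''
#     use_ca: false
#     ca_cert: /path/to/ca.crt
#     ca_insecure: false
#   settings: {}
#   services:
#     my_service:
#       type: ping          # hypothetical service type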
| mkalus/mqtt-supervisor | mqtt_supervisor.py | Python | mit | 2,529 |
import traceback
class DbUtils(object):
#FIXME change to plugin style and read query from custom config files
db = None
db_conn = None
def __init__(self, db):
self.db = db
def __del__(self):
# dtor
if self.db_conn:
self.db_conn.close()
def connect(self):
db = self.db
protocol = db['protocol']
if protocol.find('mysql') == -1:
            print('Only MySQL databases are supported at the moment')
return False
dblib = None
try:
import MySQLdb as dblib
except:
pass
if not dblib:
print('No MySQL library installed for python')
return False
try:
self.db_conn = dblib.connect(host=db['host'], port=int(db['port']),
user=db['username'], passwd=db['password'],
db=db['dbname'])
except:
print('Failed to open connection to db {} using user {}'.format(db['host'], db['username']))
#print traceback.print_exc()
return False
return True
def query(self, sql):
if not self.db_conn:
print('Can\'t query until connection is established.')
return False
try:
cur = self.db_conn.cursor()
except:
print('Failed to create connection cursor.')
#print traceback.print_exc()
return False
try:
cur.execute(sql)
except:
cur.close()
print('Failed to execute SQL command.')
#print traceback.print_exc()
return False
cur.close()
        return True
| hsrky/pythoninfo | pythoninfo/lib/db_utils.py | Python | mit | 1,831 |
#!/usr/bin/python3
#coding=utf-8
'''
**************************
* File Name :base_line.py
* Author:Charlley88
* Mail:charlley88@163.com
**************************
'''
import sys
sys.path.append('..')
import insummer
from insummer.query_expansion.entity_finder import NgramEntityFinder
from insummer.summarization.lexrank import LexRank
from insummer.summarization.textrank import TextRank
finder = NgramEntityFinder
import pickle
import data
import logging
from optparse import OptionParser
def exp(questions,qnum,method):
for i in range(qnum):
        print('Question: %s' % (i))
q = questions[i]
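        # 250 is assumed to be the summary length budget handed to LexRank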
lexer = LexRank(q,250)
result = lexer.extract()
lexer.evaluation(result,'lex')
if __name__ == "__main__":
print(__doc__)
#parser = OptionParser()
    #parser.add_option('-d','--data',dest='data',help='select dataset')
    #parser.add_option('-m','--method',dest='method',help='algorithm selection')
#(options,args) = parser.parse_args()
print('loading the data..')
duc_question = pickle.load(open('/home/lavi/project/insummer/question_data/duc_question.pkl','rb'))
#method = options.method
exp(duc_question,4,'lex')
| lavizhao/insummer | code/exp/base_line.py | Python | mit | 1,195 |
# -*- coding: utf-8 -*-
import pytest
import mock
from itertools import combinations
from biseqt.sequence import Alphabet
from biseqt.stochastics import rand_seq, MutationProcess, rand_read
from biseqt.stochastics import np # to mock
def test_rand_seq():
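    # Patch numpy's RNG so the sampled letters are deterministic, then restore it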
_bak = np.random.choice
np.random.choice = mock.Mock(return_value=[0, 0, 0])
A = Alphabet('ACGT')
assert rand_seq(A, 10) == A.parse('AAA')
np.random.choice = _bak
def test_expected_coverage():
A = Alphabet('ACGT')
S = rand_seq(A, 100)
cov = 10
reads = [r for r in rand_read(S, len_mean=len(S)/2, expected_coverage=cov)]
assert len(reads) == 2 * cov
def test_lossless_reads():
A = Alphabet('ACGT')
S = rand_seq(A, 100)
with pytest.raises(AssertionError):
next(rand_read(S, len_mean=200, num=1)) # len_mean must be < len(S)
with pytest.raises(AssertionError):
# at most one of num or expected_coverage given
next(rand_read(S, len_mean=50, num=1, expected_coverage=1))
assert sum(1 for _ in rand_read(S, len_mean=50, num=10)) == 10, \
'The number of sampled reads should be controllable'
assert sum(1 for _ in rand_read(S, len_mean=50)) == 1, \
'If neither num or expected coverage is given only one sample is read'
# there should be no noise added
read, pos = next(rand_read(S, len_mean=40, num=1))
assert S[pos:pos+len(read)] == read
S = A.parse('ACT' * 100)
reads = [x for x in rand_read(S, len_mean=100, len_sd=0.01, num=100)]
assert set(len(read) for read, _ in reads) > 1, \
'Read lengths should be randomly chosen'
len_mean = sum(len(read) for read, _ in reads) / 100.
assert len_mean > 50 and len_mean < 150, \
'Normal distribution of read lengths works'
# index edge cases
A = Alphabet(['00', '01'])
S = A.parse('01' * 10)
_bak = np.random.normal
np.random.normal = mock.Mock(return_value=[1])
assert next(rand_read(S, len_mean=1, num=1))[0] == A.parse('01'), \
'sequences in alphabets with > 1 long letters can be sampled too'
np.random.normal = _bak
def test_lossy_reads():
A = Alphabet('ACGT')
S = A.parse('ACT' * 100)
gap_kw = {'go_prob': 0.2, 'ge_prob': 0.3}
M = MutationProcess(A, subst_probs=0.3, **gap_kw)
read, pos, tx = next(M.noisy_read(S, len_mean=50, num=1))
assert tx.count('S') > 0 and tx.count('I') + tx.count('D') > 0, \
'Random mutations should be performed to get lossy reads'
def test_mutation_process():
A = Alphabet('ACGT')
S = A.parse('ACT' * 100)
gap_kw = {'go_prob': 0, 'ge_prob': 0}
T, tx = MutationProcess(A, subst_probs=0, **gap_kw).mutate(S)
assert T == S and tx == 'MMM' * 100, \
'all mutation probabilities can be set to zero'
T, tx = MutationProcess(A, subst_probs=0.1, **gap_kw).mutate(S)
assert all(op in 'MS' for op in tx) and 'S' in tx, \
'there can be mutation processes with only substitutions'
T, tx = MutationProcess(A, subst_probs=0.01, **gap_kw).mutate(S)
assert tx.count('S') < 0.1 * len(S), 'substitution probabilities work'
with pytest.raises(AssertionError):
MutationProcess(A, go_prob=0.2, ge_prob=0.1) # go_prob <= ge_prob
gap_kw = {'go_prob': 0.05, 'ge_prob': 0.1}
T, tx = MutationProcess(A, subst_probs=0, **gap_kw).mutate(S)
indels = sum(1 for op in tx if op in 'ID')
assert indels > 0 and indels < 0.5 * len(S), 'gap probabilities work'
def test_log_odds_scores():
A = Alphabet('ACGT')
# linear gap model
P = MutationProcess(A, subst_probs=.1, ge_prob=.1, go_prob=.1)
subst_scores, (go_score, ge_score) = P.log_odds_scores()
assert go_score == 0. and ge_score < 0
match_pos = [(i, i) for i in range(len(A))]
mismatch_pos = [(i, j) for i, j in combinations(range(len(A)), 2)]
assert all(subst_scores[i][j] < 0 for i, j in mismatch_pos)
assert all(subst_scores[i][j] > 0 for i, j in match_pos)
# affine gap model
P = MutationProcess(A, subst_probs=.1, ge_prob=.2, go_prob=.1)
subst_scores, (go_score, ge_score) = P.log_odds_scores()
assert ge_score < 0
# do mismatch scores go down if subst probs are decreased?
P = MutationProcess(A, subst_probs=.01, ge_prob=.2, go_prob=.1)
new_subst_scores, _ = P.log_odds_scores()
assert new_subst_scores[0][1] < subst_scores[0][1], \
'mismatch scores become more negative with lower mismatch probs'
| amirkdv/biseqt | tests/test_stochastics.py | Python | bsd-3-clause | 4,450 |
from httprpc.client import Client
a = Client("http://127.0.0.1:8000", username='a', password='a')
print a.hello(a=1, b=1)
print a.demo.hello(a=1, b=1)
| ya790206/httprpc-python | client.py | Python | apache-2.0 | 152 |
from mxl import MXLFile
import fileinput
SEP = ','
COLUMNS = (
'time.upper',
'time.lower',
'key.fifths',
'maj.min',
'melody.staff.1',
'melody.staff.2',
'num.dynamic.changes',
'A',
'B',
'C',
'D',
'E',
'F',
'G',
'16th',
'eighth',
'half',
'quarter',
'whole'
)
if __name__ == '__main__':
print SEP.join(COLUMNS)
for line in fileinput.input():
mxlf = MXLFile(line.rstrip('\n'))
num_measures = len(mxlf.get_part(0).get_measures())
note_type_counts = mxlf.get_part(0).get_note_type_counts(staff_num=1)
note_pitch_counts = mxlf.get_part(0).get_note_pitch_counts(staff_num=1)
num_staffs = mxlf.get_part(0).get_num_staffs()
time_signature = mxlf.time_signature()
key_signature = mxlf.key_signature()
melody_staff_0 = mxlf.get_part(0).get_melody_stat(staff_num=1)
if num_staffs > 1:
melody_staff_1 = mxlf.get_part(0).get_melody_stat(staff_num=2)
else:
melody_staff_1 = -1
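        # Normalize counts by the number of measures so pieces of different
        # lengths stay comparable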
columns = (
time_signature[0],
time_signature[1],
key_signature[0],
key_signature[1],
melody_staff_0,
melody_staff_1,
len(list(mxlf.get_part(0).get_dynamic_changes())) / float(num_measures),
note_pitch_counts['A'] / float(num_measures),
note_pitch_counts['B'] / float(num_measures),
note_pitch_counts['C'] / float(num_measures),
note_pitch_counts['D'] / float(num_measures),
note_pitch_counts['E'] / float(num_measures),
note_pitch_counts['F'] / float(num_measures),
note_pitch_counts['G'] / float(num_measures),
note_type_counts['16th'] / float(num_measures),
note_type_counts['eighth'] / float(num_measures),
note_type_counts['half'] / float(num_measures),
note_type_counts['quarter'] / float(num_measures),
note_type_counts['whole'] / float(num_measures)
)
print SEP.join(map(str, columns))
| themichaellai/musicxml-stats | output.py | Python | mit | 2,098 |
'''
Kivy standard library imports
'''
import kivy
from kivy.config import Config
#kivy.config.Config.set('graphics','resizable', False) #config needs to be set before kivy.app is imported
Config.set('graphics', 'fullscreen', 'auto')
from kivy.app import App
from time import time
from os.path import dirname, join
from kivy.lang import Builder
from kivy.properties import NumericProperty,StringProperty,BooleanProperty
from kivy.properties import ListProperty,ReferenceListProperty,ObjectProperty
from kivy.animation import Animation
from kivy.uix.screenmanager import Screen
from kivy.core.window import Window
from kivy.uix.widget import Widget
from kivy.uix.bubble import Bubble
from kivy.uix.gridlayout import GridLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.label import Label
from kivy.uix.popup import Popup
from kivy.uix.button import Button
from kivy.uix.rst import RstDocument
from kivy.clock import Clock, mainthread
from kivy.uix.videoplayer import VideoPlayer
'''
Project imports
'''
from data.DatabaseThread import *
from data.StepperControl import *
from data.ServoControl import *
from data.libs.MyKnob import *
from data.libs.garden.mapview import *
| LBCC-SpaceClub/HAB2017 | AntennaTracker/data/libs/Dependencies.py | Python | mit | 1,235 |
import logging
import markdown
from flask import Markup
from flask import render_template
from sqlalchemy import text, column
from pajbot.managers.db import DBManager
from pajbot.models.user import User
from pajbot.models.webcontent import WebContent
from pajbot.modules import ChattersRefreshModule
log = logging.getLogger(__name__)
def init(app):
@app.route("/points")
def points():
with DBManager.create_session_scope() as db_session:
custom_web_content = db_session.query(WebContent).filter_by(page="points").first()
custom_content = ""
if custom_web_content and custom_web_content.content:
try:
custom_content = Markup(markdown.markdown(custom_web_content.content))
except:
log.exception("Unhandled exception in def index")
# rankings is a list of (User, int) tuples (user with their rank)
# note on the efficiency of this query: takes approx. 0.3-0.4 milliseconds on a 5 million user DB
#
# pajbot=# EXPLAIN ANALYZE SELECT * FROM (SELECT *, rank() OVER (ORDER BY points DESC) AS rank FROM "user") AS subquery LIMIT 30;
# QUERY PLAN
# ----------------------------------------------------------------------------------------------------------------------------------------------------------
# Limit (cost=0.43..2.03 rows=30 width=49) (actual time=0.020..0.069 rows=30 loops=1)
# -> WindowAgg (cost=0.43..181912.19 rows=4197554 width=49) (actual time=0.020..0.065 rows=30 loops=1)
# -> Index Scan Backward using user_points_idx on "user" (cost=0.43..118948.88 rows=4197554 width=41) (actual time=0.012..0.037 rows=31 loops=1)
# Planning Time: 0.080 ms
# Execution Time: 0.089 ms
#
# (see also the extensive comment on migration revision ID 2, 0002_create_index_on_user_points.py)
rankings = db_session.query(User, column("rank")).from_statement(
text(
'SELECT * FROM (SELECT *, rank() OVER (ORDER BY points DESC) AS rank FROM "user") AS subquery LIMIT 30'
)
)
chatters_refresh_enabled = ChattersRefreshModule.is_enabled()
chatters_refresh_settings = ChattersRefreshModule.module_settings()
chatters_refresh_interval = ChattersRefreshModule.UPDATE_INTERVAL
return render_template(
"points.html",
top_30_users=rankings,
custom_content=custom_content,
chatters_refresh_enabled=chatters_refresh_enabled,
chatters_refresh_settings=chatters_refresh_settings,
chatters_refresh_interval=chatters_refresh_interval,
)
| pajlada/pajbot | pajbot/web/routes/base/points.py | Python | mit | 2,923 |
#!/usr/bin/env python
a = [5, 3, 6, 4, 1, 2, 0, 8, 9]
class Node(object):
def __init__(self, value, left, right):
self.value, self.left, self.right = value, left, right
def __repr__(self):
return str(self.value)
class BTree(object):
def __init__(self, value):
self.root_node = Node(value, None, None)
def add_node(self, value):
cur_node = self.root_node
new_node = Node(value, None, None)
while True:
if cur_node.value > new_node.value: # Insert left side
if cur_node.left:
cur_node = cur_node.left
continue
else:
cur_node.left = new_node
return
elif cur_node.value < new_node.value: # Insert right side
if cur_node.right:
cur_node = cur_node.right
continue
else:
cur_node.right = new_node
return
            break  # Same as current node, do nothing
def find_depth(self, cur_node=None, depth=0):
if cur_node is None:
cur_node = self.root_node
depth1 = depth2 = depth
if cur_node.left:
depth1 = self.find_depth(cur_node.left, depth + 1)
if cur_node.right:
depth2 = self.find_depth(cur_node.right, depth + 1)
depth = depth1
if depth1 < depth2:
depth = depth2
return depth
def print_nodes(self, cur_node=None, print_array=None, cur_depth=0):
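        # Collect node values per depth level, then print a crude ASCII tree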
is_root_node = False
if cur_depth == 0:
is_root_node = True
cur_node = cur_node if cur_node else self.root_node
total_depth = self.find_depth(cur_node)
print_array = [[] for i in range(total_depth + 2)]
print_array[cur_depth].append(str(cur_node.value))
if cur_node.left:
self.print_nodes(cur_node.left, print_array, cur_depth + 1)
else:
print_array[cur_depth + 1].append(' ')
if cur_node.right:
self.print_nodes(cur_node.right, print_array, cur_depth + 1)
else:
print_array[cur_depth + 1].append(' ')
if is_root_node:
for i in range(len(print_array)):
print '{}{}'.format(''.join(' ' * (total_depth - i + 1)),
' '.join(print_array[i]))
    def find_node(self, value, cur_node=None):
        if cur_node is None:
            cur_node = self.root_node
        if cur_node.value == value:
            return cur_node
        elif cur_node.value > value:
            next_node = cur_node.left
        else:
            next_node = cur_node.right
        if next_node is None:
            return None  # Value is not in the tree
        return self.find_node(value, next_node)
def del_node(self, del_value, cur_node=None):
# Find node and parent node
if cur_node is None:
cur_node = self.root_node
parent_node = None
while True:
if cur_node.value == del_value:
break
elif cur_node.value > del_value and cur_node.left is not None:
parent_node = cur_node
cur_node = cur_node.left
continue
elif cur_node.value < del_value and cur_node.right is not None:
parent_node = cur_node
cur_node = cur_node.right
continue
return # Did not find node
        if cur_node.left is None or cur_node.right is None:
            replacement_node = cur_node.left if cur_node.left else \
                cur_node.right
        else:
            # Replace with the in-order predecessor: the rightmost node of
            # the left subtree keeps the BST ordering intact
            replacement_node = cur_node.left
            replacement_node_parent = cur_node
            while replacement_node.right:
                replacement_node_parent = replacement_node
                replacement_node = replacement_node.right
            if replacement_node_parent is not cur_node:
                # Detach the predecessor and re-attach its left subtree
                replacement_node_parent.right = replacement_node.left
                replacement_node.left = cur_node.left
            replacement_node.right = cur_node.right
if parent_node:
if parent_node.left == cur_node:
parent_node.left = replacement_node
else:
parent_node.right = replacement_node
return
else:
self.root_node = replacement_node
if __name__ == '__main__':
btree = BTree(a[0])
for i in a[1:]:
btree.add_node(i)
print a
btree.print_nodes()
found_node = btree.find_node(3)
btree.print_nodes(cur_node=found_node)
btree.del_node(5)
btree.print_nodes()
| Ramyak/CodingPractice | algo_practice/sort/binary_search.py | Python | gpl-2.0 | 4,558 |
import unittest
import src.vector_gen.generateWeatherVectors as gwv
import pandas as pd
from test import test_path as path
class GenerateWeatherVectorsTest(unittest.TestCase):
trajectories_df = None
weather_df = None
def setUp(self):
self.trajectories_df = pd.read_csv(path.trajectories_training_file2)
self.weather_df = pd.read_csv(path.weather_training_file)
def test_get_simple_result(self):
X = gwv.generate_TimeInformationCurrentSituationWeatherVectors(self.trajectories_df, self.weather_df)
self.assertIsNotNone(X)
    def test_length_of_timeInformationCurrentSituationWeatherVector_X(self):
X = gwv.generate_TimeInformationCurrentSituationWeatherVectors(self.trajectories_df, self.weather_df)
        # 7 days of training data, 12 two-hour windows per day
number = 7*12
self.assertEqual(len(X), number)
if __name__ == '__main__':
unittest.main()
| Superchicken1/SambaFlow | python/traffic-prediction/test/test_generateWeatherVectors.py | Python | apache-2.0 | 935 |
# =============================================================================
# COPYRIGHT 2013 Brain Corporation.
# License under MIT license (see LICENSE file)
# =============================================================================
import logging
import os
import platform
from utility import run_shell, cp, fix_rpath, safe_remove
from requirement import RequirementException
import shutil
import subprocess
def install(robustus, requirement_specifier, rob_file, ignore_index):
ni_install_dir = os.path.join(robustus.cache, 'OpenNI2')
if requirement_specifier.version is not None:
ni_install_dir += requirement_specifier.version
def in_cache():
return os.path.isfile(os.path.join(ni_install_dir, 'libOpenNI2.so'))
if not in_cache() and not ignore_index:
cwd = os.getcwd()
ni_clone_dir = os.path.join(cwd, 'OpenNI2')
try:
if os.path.isdir(ni_clone_dir):
logging.warn('Directory for cloning OpenNI found, cloning skipped')
else:
logging.info('Cloning OpenNI')
retcode = run_shell(['git', 'clone', 'https://github.com/occipital/OpenNI2.git'])
if retcode != 0:
raise RequirementException('OpenNI2 clone failed')
os.chdir(ni_clone_dir)
# checkout requested version
branch = requirement_specifier.version if requirement_specifier.version is not None else 'master'
if requirement_specifier.version is not None:
retcode = run_shell(['git', 'checkout', branch])
if retcode != 0:
raise RequirementException('OpenNI2 checkout failed')
logging.info('Building OpenNI')
if platform.machine().startswith('arm'):
ver = 'Arm'
# patch flags for arm
file_to_patch = os.path.join(ni_clone_dir, 'ThirdParty/PSCommon/BuildSystem/Platform.Arm')
with open(file_to_patch, "rt") as f:
content = f.read()
with open(file_to_patch, "wt") as f:
f.write(content.replace('-mfloat-abi=softfp', ''))
elif platform.architecture()[0].startswith('64'):
ver = 'x64'
else:
ver = 'x86'
retcode = run_shell(['make', 'PLATFORM=' + ver], verbose=robustus.settings['verbosity'] >= 1)
if retcode != 0:
raise RequirementException('OpenNI2 build failed')
# copy release dir and usb rules to wheelhouse
if os.path.isdir(ni_install_dir):
shutil.rmtree(ni_install_dir)
release_dir = os.path.join(ni_clone_dir, 'Bin', ver + '-Release')
shutil.copytree(release_dir, ni_install_dir)
cp(os.path.join(ni_clone_dir, 'Packaging/Linux/primesense-usb.rules'), ni_install_dir)
finally:
os.chdir(cwd)
safe_remove(ni_clone_dir)
# copy files to venv
if in_cache():
logging.info('Copying OpenNI2 to virtualenv')
cp(os.path.join(ni_install_dir, '*.so'), os.path.join(robustus.env, 'lib'))
cp(os.path.join(ni_install_dir, '*.jar'), os.path.join(robustus.env, 'lib'))
ni_drivers_dir = os.path.join(robustus.env, 'lib/OpenNI2')
if os.path.isdir(ni_drivers_dir):
shutil.rmtree(ni_drivers_dir)
shutil.copytree(os.path.join(ni_install_dir, 'OpenNI2'), ni_drivers_dir)
# copy demo for testing purposes
cp(os.path.join(ni_install_dir, 'SimpleRead'), os.path.join(robustus.env, 'bin'))
fix_rpath(robustus, robustus.env, os.path.join(robustus.env, 'bin/SimpleRead'), os.path.join(robustus.env, 'lib'))
# setup usb rules
logging.info('Configuring udev rules, you may need to reconnect sensor or restart computer')
retcode = run_shell(['sudo', 'cp', os.path.join(ni_install_dir, 'primesense-usb.rules'), '/etc/udev/rules.d/557-primesense-usb.rules'], verbose=robustus.settings['verbosity'] >= 1)
if retcode != 0:
            raise RequirementException('Failed to copy udev rules')
# return nonzero code, but seems to work
subprocess.call(['sudo', 'udevadm', 'control', '--reload-rules'])
else:
raise RequirementException('can\'t find OpenNI2-%s in robustus cache' % requirement_specifier.version)
| braincorp/robustus | robustus/detail/install_openni.py | Python | mit | 4,406 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This tool helps you to rebase package to the latest version
# Copyright (C) 2013-2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors: Petr Hracek <phracek@redhat.com>
# Tomas Hozza <thozza@redhat.com>
from __future__ import print_function
import subprocess
import os
from rebasehelper.version import VERSION
try:
from setuptools import setup, Command
except:
from distutils.core import setup, Command
class PyTest(Command):
user_options = [('test-runner=',
't',
'test runner to use; by default, multiple py.test runners are tried')]
command_consumes_arguments = True
def initialize_options(self):
self.test_runner = None
self.args = []
def finalize_options(self):
pass
def runner_exists(self, runner):
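        # Return True if the given runner executable can be found on the PATH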
syspaths = os.getenv('PATH').split(os.pathsep)
for p in syspaths:
if os.path.exists(os.path.join(p, runner)):
return True
return False
def run(self):
        # try every supported py.test runner unless a specific one was requested
supported = ['2.7', '3.3', '3.4']
potential_runners = ['py.test-' + s for s in supported]
if self.test_runner:
potential_runners = [self.test_runner]
runners = [pr for pr in potential_runners if self.runner_exists(pr)]
if not runners:
raise SystemExit('No test runners available!')
retcode = 0
for runner in runners:
if len(runners) > 1:
print('\n' * 2)
print('Running tests using "{0}":'.format(runner))
cmd = [runner]
for a in self.args:
cmd.append(a)
cmd.append('-v')
cmd.append('test')
t = subprocess.Popen(cmd)
rc = t.wait()
retcode = t.returncode or retcode
raise SystemExit(retcode)
setup(
name='rebasehelper',
version=VERSION,
description='RebaseHelper helps you to rebase your packages.',
keywords='packages,easy,quick',
author='Petr Hracek',
author_email='phracek@redhat.com',
url='https://github.com/phracek/rebase-helper',
license='GPLv2+',
packages=['rebasehelper'],
include_package_data=True,
entry_points={'console_scripts': ['rebase-helper=rebasehelper.cli:CliHelper.run']},
setup_requires=[],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Software Development',
],
cmdclass={'test': PyTest}
)
| uhliarik/rebase-helper | setup.py | Python | gpl-2.0 | 3,564 |
# Python3
def allLongestStrings(inputArray):
maxLen = max(len(s) for s in inputArray)
return [ s for s in inputArray if len(s) == maxLen ]
| RevansChen/online-judge | Codefights/arcade/intro/level-3/9.All-Longest-Strings/Python/solution1.py | Python | mit | 148 |
# -*- coding: utf-8 -*-
"""
Copyright 2014 Telefonica Investigación y Desarrollo, S.A.U
This file is part of fiware-pep-steelskin
fiware-pep-steelskin is free software: you can redistribute it and/or
modify it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the License,
or (at your option) any later version.
fiware-pep-steelskin is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public
License along with fiware-pep-steelskin.
If not, see http://www.gnu.org/licenses/.
For those usages not covered by the GNU Affero General Public License
please contact with::[iot_support@tid.es]
"""
__author__ = 'Jon Calderin Goñi <jon.caldering@gmail.com>'
from fabric.api import run, env, cd, put, sudo, local, output
import os
import platform
from lettuce import world
def set_variables_config(host_proxied_ip, host_proxied_port, port_listening,
ac_port, ac_ip,
pep_user, pep_password, pep_domain,
ks_ip, ks_port,
log_level,
plug_in, plug_in_extract_action,
bypass_activation='false', bypass_id='',
cache_users='1000', cache_projects='1000', cache_roles='60', administration_port='11211', ac_disable='false', ks_check_headers='true'):
"""
Modify the variables in the PEP config_template file and write the final values in a PEP config file.
:param host_proxied_ip:
:param host_proxied_port:
:param port_listening:
:param ac_port:
:param ac_ip:
:param pep_user:
:param pep_password:
:param pep_domain:
:param ks_ip:
:param ks_port:
:param log_level:
:param plug_in:
:param plug_in_extract_action:
:param bypass_activation:
:param bypass_id:
:param cache_users:
:param cache_projects:
:param cache_roles:
:param administration_port:
:return:
"""
world.log.info('Setting the pep config')
replaces = {
'host_proxied_ip': host_proxied_ip,
'host_proxied_port': host_proxied_port,
'port_listening': port_listening,
'ac_ip': ac_ip,
'ac_port': ac_port,
'ac_disable': ac_disable,
'pep_user': pep_user,
'pep_password': pep_password,
'pep_domain': pep_domain,
'ks_check_headers': ks_check_headers,
'ks_ip': ks_ip,
'ks_port': ks_port,
'log_level': log_level,
'plug_in': plug_in,
'plug_in_extract_action': plug_in_extract_action,
'bypass_activation': bypass_activation,
'bypass_id': bypass_id,
'cache_users': cache_users,
'cache_projects': cache_projects,
'cache_roles': cache_roles,
'administration_port': administration_port
}
path, fl = os.path.split(os.path.realpath(__file__))
if platform.system() == 'Windows':
path += '\\resources\\'
elif platform.system() == 'Linux':
path += '/resources/'
else:
        raise NameError('The OS is not recognized, set the config manually')
full_path_template = path + 'config_template.js'
full_path_config = path + 'config.js'
template = open(full_path_template)
config = open(full_path_config, 'w+')
# Check in each line if there is a variable to modify
for line in template.readlines():
for replace in replaces:
string_to_replace = '{{%s}}' % replace
if line.find(string_to_replace) >= 0:
line = line.replace(string_to_replace, replaces[replace])
config.write(line)
template.close()
config.close()
def get_ssh_port(container_name):
"""
Given the name of a container, get the ssh port
:param container_name:
:return:
"""
world.log.info('Getting ssh port of the container')
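    # `docker port <container> 22` prints something like 0.0.0.0:49154;
    # keep only the host port after the colon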
ret = run('docker port {container_name} 22'.format(container_name=container_name))
return ret.split(':')[1]
def start_docker_pep(ip_host, user_host, password_host, container_user, container_pass, container_name,
pep_path='/fiware-pep-steelskin'):
"""
Given the docker host, get the PEP container and start it with the config defined
:param ip_host:
:param user_host:
:param password_host:
:param container_user:
:param container_pass:
:param container_name:
:param pep_path:
:return:
"""
world.log.info('Starting pep in docker')
env.host_string = ip_host
env.user = user_host
env.password = password_host
output['stdout'] = False
output['running'] = False
output['warnings'] = False
container_port = get_ssh_port(container_name)
start_pep(ip_host, container_user, container_pass, container_port, pep_path)
def stop_docker_pep(ip_host, user_host, password_host, container_user, container_pass, container_name):
"""
Given the docker host, get the PEP container and stop it
:param ip_host:
:param user_host:
:param password_host:
:param container_user:
:param container_pass:
:param container_name:
:return:
"""
    world.log.info('Stopping pep in docker')
env.host_string = ip_host
env.user = user_host
env.password = password_host
output['stdout'] = False
output['running'] = False
output['warnings'] = False
container_port = get_ssh_port(container_name)
stop_pep(ip_host, container_user, container_pass, container_port)
def start_pep(ip, user, password, port='22', pep_path='/fiware-pep-steelskin'):
"""
    Given ssh connection data, stop PEP if it is running, put the new configuration in place, and start it.
    The machine has to have the "dtach" package
:param ip:
:param user:
:param password:
:param port:
:param pep_path:
:return:
"""
world.log.info('Starting pep in remote')
env.host_string = ip + ':' + port
env.user = user
env.password = password
env.sudo_password = password
output['stdout'] = False
output['running'] = False
output['warnings'] = False
path, fl = os.path.split(os.path.realpath(__file__))
if platform.system() == 'Windows':
config = path + '\\resources\\' + 'config.js'
elif platform.system() == 'Linux':
config = path + '/resources/' + 'config.js'
else:
        raise NameError('The OS is not supported')
so = run('cat /etc/issue').split('\n')[0].split(' ')[0]
if so == 'CentOS':
pid = sudo('ps -ef | grep "node bin/pepProxy" | grep -v grep | awk \'{print $2}\'')
elif so == 'Ubuntu':
pid = sudo('ps -ef | grep "nodejs bin/pepProxy" | grep -v grep | awk \'{print $2}\'')
else:
        raise NameError('Pep can only be started on Ubuntu and CentOS systems')
if pid != '':
for proc_pid in pid.split('\n'):
sudo('kill -9 {pid}'.format(pid=proc_pid.strip()))
with cd(pep_path):
put(config, '{path}/config.js'.format(path=pep_path))
sudo('mkdir -p {pep_tmp_dir}'.format(pep_tmp_dir=world.pep_tmp_dir))
if so == 'CentOS':
resp = sudo('dtach -n `mktemp -u {pep_tmp_dir}/dtach.XXXX` /bin/bash -c \' node bin/pepProxy >> {pep_tmp_dir}/pep.log\''.format(pep_tmp_dir=world.pep_tmp_dir))
elif so == 'Ubuntu':
resp =sudo('dtach -n `mktemp -u {pep_tmp_dir}/dtach.XXXX` /bin/bash -c \' nodejs bin/pepProxy >> {pep_tmp_dir}/pep.log\''.format(pep_tmp_dir=world.pep_tmp_dir))
else:
        raise NameError('Pep can only be started on Ubuntu and CentOS systems')
world.log.debug('The response initializing pep in remote is: {resp}'.format(resp=resp))
def stop_pep(ip, user, password, port='22'):
"""
Stop pep process
:param ip:
:param user:
:param password:
:param port:
:return:
"""
    world.log.info('Stopping pep in remote')
env.host_string = ip + ':' + port
env.user = user
env.password = password
env.sudo_password = password
output['stdout'] = False
output['running'] = False
output['warnings'] = False
so = run('cat /etc/issue').split('\n')[0].split(' ')[0]
if so == 'CentOS':
pid = sudo('ps -ef | grep "node bin/pepProxy" | grep -v grep | awk \'{print $2}\'')
elif so == 'Ubuntu':
pid = sudo('ps -ef | grep "nodejs bin/pepProxy" | grep -v grep | awk \'{print $2}\'')
else:
        raise NameError('Pep can only be started on Ubuntu and CentOS systems')
if pid != '':
for proc_pid in pid.split('\n'):
sudo('kill -9 {pid}'.format(pid=proc_pid.strip()))
def start_pep_local(pep_path='/fiware-pep-steelskin'):
"""
    Stop PEP if it is running, put the new configuration in place, and start it locally.
The machine has to have the "dtach" package
:param pep_path:
:return:
"""
world.log.info('Starting pep in local')
output['stdout'] = False
output['running'] = False
output['warnings'] = False
path, fl = os.path.split(os.path.realpath(__file__))
if platform.system() == 'Windows':
config = path + '\\resources\\' + 'config.js'
elif platform.system() == 'Linux':
config = path + '/resources/' + 'config.js'
else:
        raise NameError('The OS is not supported')
so = local('cat /etc/issue', capture=True).split('\n')[0].split(' ')[0]
if so == 'CentOS':
pid = local('ps -ef | grep "node bin/pepProxy" | grep -v grep | awk \'{print $2}\'', capture=True)
elif so == 'Ubuntu':
pid = local('ps -ef | grep "nodejs bin/pepProxy" | grep -v grep | awk \'{print $2}\'', capture=True)
else:
        raise NameError('Pep can only be started on Ubuntu and CentOS systems')
if pid != '':
for proc_pid in pid.split('\n'):
local('kill -9 {pid}'.format(pid=proc_pid.strip()), capture=True)
local('cp {config} {path}/config.js'.format(config=config, path=pep_path), capture=True)
local('mkdir -p {pep_tmp_dir}'.format(pep_tmp_dir=world.pep_tmp_dir))
if so == 'CentOS':
resp = local('dtach -n `mktemp -u {pep_tmp_dir}/dtach.XXXX` /bin/bash -c \' cd {path} && node bin/pepProxy >> {pep_tmp_dir}/pep.log\''.format(path=pep_path, pep_tmp_dir=world.pep_tmp_dir), capture=True)
elif so == 'Ubuntu':
resp = local('dtach -n `mktemp -u {pep_tmp_dir}/dtach.XXXX` /bin/bash -c \' cd {path} && nodejs bin/pepProxy >> {pep_tmp_dir}/pep.log\''.format(path=pep_path, pep_tmp_dir=world.pep_tmp_dir), capture=True)
else:
        raise NameError('Pep can only be started on Ubuntu and CentOS systems')
world.log.debug('The response initializing pep in local is: {resp}'.format(resp=resp))
def stop_local_pep():
"""
Stop pep process
:return:
"""
    world.log.info('Stopping pep in local')
output['stdout'] = False
output['running'] = False
output['warnings'] = False
so = local('cat /etc/issue', capture=True).split('\n')[0].split(' ')[0]
if so == 'CentOS':
pid = local('ps -ef | grep "node bin/pepProxy" | grep -v grep | awk \'{print $2}\'', capture=True)
elif so == 'Ubuntu':
pid = local('ps -ef | grep "nodejs bin/pepProxy" | grep -v grep | awk \'{print $2}\'', capture=True)
else:
        raise NameError('Pep can only be started on Ubuntu and CentOS systems')
if pid != '':
for proc_pid in pid.split('\n'):
local('kill -9 {pid}'.format(pid=proc_pid.strip()), capture=True)
| agroknow/fiware-pep-steelskin | test/acceptance/tools/deploy_pep.py | Python | agpl-3.0 | 11,656 |
#!/usr/bin/env python
# -*- coding:utf8 -*-
from __future__ import print_function
import sys
from os.path import dirname, abspath
import nose
def run_all(argv=None):
sys.exitfunc = lambda: sys.stderr.write('Shutting down....\n')
# always insert coverage when running tests
if argv is None:
argv = [
'nosetests', '--with-xunit',
'--with-xcoverage', '--cover-package=nos',
'--cover-erase', '--cover-branches',
'--logging-filter=nos', '--logging-level=DEBUG',
'--verbose'
]
nose.run_exit(
argv=argv,
defaultTest=abspath(dirname(__file__))
)
if __name__ == '__main__':
run_all(sys.argv)
| NetEase-Object-Storage/nos-python-sdk | test_nos/run_tests.py | Python | mit | 707 |
#!/usr/bin/env python
#coding:utf-8
# Purpose: cell spanning controller
# Created: 13.02.2011
# Copyright (C) 2011, Manfred Moitzi
# License: MIT license
from __future__ import unicode_literals, print_function, division
__author__ = "mozman <mozman@gmx.at>"
from .xmlns import wrap
from .tableutils import iter_cell_range, iter_cell_range_without_start_pos
class CellSpanController(object):
# is not a public class
# public access only by Table or similar classes
# all cell references has to be 2-tuples!
def __init__(self, row_controller):
self._row_controller = row_controller
def _get_cell(self, pos):
return wrap(self._row_controller.get_cell(pos))
def is_cell_spanning(self, pos):
return self._get_cell(pos).span != (1, 1)
def set_span(self, pos, size):
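        # Cover every cell of the range except the start cell, which carries
        # the span attributes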
self._check_pos_and_size(pos, size)
if self._has_cell_range_spanned_cells(pos, size):
raise ValueError("cell range contains already spanned cells")
for cell_index in iter_cell_range_without_start_pos(pos, size):
self._cover_cell(cell_index)
self._set_span_attributes(pos, size)
def _check_pos_and_size(self, pos, size):
start_row, start_column = pos
if start_row < 0 or start_column < 0:
raise IndexError("invalid start pos: %s" % tostr(pos))
nrows, ncols = size
if nrows < 1 or ncols < 1:
raise ValueError("invalid size parameter: %s" % tostr(size))
if start_row + nrows > self._row_controller.nrows() or \
start_column + ncols > self._row_controller.ncols():
raise ValueError("cell range exceeds table limits")
def _has_cell_range_spanned_cells(self, pos, size):
for cell_index in iter_cell_range(pos, size):
if self.is_cell_spanning(cell_index):
return True
return False
def _cover_cell(self, pos):
cell = self._get_cell(pos)
if not cell.covered:
cell._set_covered(True)
def _uncover_cell(self, pos):
cell = self._get_cell(pos)
if cell.covered:
cell._set_covered(False)
def _set_span_attributes(self, pos, size):
cell = self._get_cell(pos)
cell._set_span(size)
self._uncover_cell(pos)
def remove_span(self, pos):
if not self.is_cell_spanning(pos):
return # should it raise an error?
size = self._get_cell(pos).span
for cell_index in iter_cell_range(pos, size):
self._uncover_cell(cell_index)
self._remove_span_attributes(pos)
def _remove_span_attributes(self, pos):
cell = self._get_cell(pos)
cell._del_span_attributes()
self._uncover_cell(pos)
| chalbersma/btcaddrtocsv | ezodf/cellspancontroller.py | Python | lgpl-2.1 | 2,749 |
import logging
from xml.etree import ElementTree as etree
from xml.parsers import expat
try:
from oslo_serialization import jsonutils
except ImportError:
from oslo.serialization import jsonutils
from builtins import int
import six
from . import constants
from . import exceptions as exception
from ..i18n import _
LOG = logging.getLogger(__name__)
class ActionDispatcher(object):
def dispatch(self, *args, **kwargs):
action = kwargs.pop('action', 'default')
action_method = getattr(self, str(action), self.default)
return action_method(*args, **kwargs)
def default(self, data):
raise NotImplementedError()
class DictSerializer(ActionDispatcher):
def serialize(self, data, action='default'):
return self.dispatch(data, action=action)
def default(self, data):
return ""
class JSONDictSerializer(DictSerializer):
def default(self, data):
def sanitizer(obj):
return six.text_type(obj)
return jsonutils.dumps(data, default=sanitizer)
class XMLDictSerializer(DictSerializer):
def __init__(self, metadata=None, xmlns=None):
super(XMLDictSerializer, self).__init__()
self.metadata = metadata or {}
if not xmlns:
xmlns = self.metadata.get('xmlns')
if not xmlns:
xmlns = constants.XML_NS_V20
self.xmlns = xmlns
def default(self, data):
try:
links = None
has_atom = False
if data is None:
root_key = constants.VIRTUAL_ROOT_KEY
root_value = None
else:
link_keys = [k for k in six.iterkeys(data) or []
if k.endswith('_links')]
if link_keys:
links = data.pop(link_keys[0], None)
has_atom = True
root_key = (len(data) == 1 and
list(data.keys())[0] or constants.VIRTUAL_ROOT_KEY)
root_value = data.get(root_key, data)
doc = etree.Element("_temp_root")
used_prefixes = []
self._to_xml_node(doc, self.metadata, root_key,
root_value, used_prefixes)
if links:
self._create_link_nodes(list(doc)[0], links)
return self.to_xml_string(list(doc)[0], used_prefixes, has_atom)
except AttributeError as e:
LOG.exception(str(e))
return ''
def __call__(self, data):
return self.default(data)
def to_xml_string(self, node, used_prefixes, has_atom=False):
self._add_xmlns(node, used_prefixes, has_atom)
return etree.tostring(node, encoding='UTF-8')
def _add_xmlns(self, node, used_prefixes, has_atom=False):
node.set('xmlns', self.xmlns)
node.set(constants.TYPE_XMLNS, self.xmlns)
if has_atom:
node.set(constants.ATOM_XMLNS, constants.ATOM_NAMESPACE)
node.set(constants.XSI_NIL_ATTR, constants.XSI_NAMESPACE)
ext_ns = self.metadata.get(constants.EXT_NS, {})
for prefix in used_prefixes:
if prefix in ext_ns:
node.set('xmlns:' + prefix, ext_ns[prefix])
def _to_xml_node(self, parent, metadata, nodename, data, used_prefixes):
result = etree.SubElement(parent, nodename)
if ":" in nodename:
used_prefixes.append(nodename.split(":", 1)[0])
if isinstance(data, list):
if not data:
result.set(
constants.TYPE_ATTR,
constants.TYPE_LIST)
return result
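            # Derive the element name for list items from the plurals
            # metadata, falling back to stripping a trailing 's'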
singular = metadata.get('plurals', {}).get(nodename, None)
if singular is None:
if nodename.endswith('s'):
singular = nodename[:-1]
else:
singular = 'item'
for item in data:
self._to_xml_node(result, metadata, singular, item,
used_prefixes)
elif isinstance(data, dict):
if not data:
result.set(
constants.TYPE_ATTR,
constants.TYPE_DICT)
return result
attrs = metadata.get('attributes', {}).get(nodename, {})
for k, v in sorted(data.items()):
if k in attrs:
result.set(k, str(v))
else:
self._to_xml_node(result, metadata, k, v,
used_prefixes)
elif data is None:
result.set(constants.XSI_ATTR, 'true')
else:
if isinstance(data, bool):
result.set(
constants.TYPE_ATTR,
constants.TYPE_BOOL)
elif isinstance(data, int):
result.set(
constants.TYPE_ATTR,
constants.TYPE_INT)
elif isinstance(data, float):
result.set(
constants.TYPE_ATTR,
constants.TYPE_FLOAT)
LOG.debug("Data %(data)s type is %(type)s",
{'data': data,
'type': type(data)})
result.text = six.text_type(data)
return result
def _create_link_nodes(self, xml_doc, links):
for link in links:
link_node = etree.SubElement(xml_doc, 'atom:link')
link_node.set('rel', link['rel'])
link_node.set('href', link['href'])
class TextDeserializer(ActionDispatcher):
def deserialize(self, datastring, action='default'):
return self.dispatch(datastring, action=action)
def default(self, datastring):
return {}
class JSONDeserializer(TextDeserializer):
def _from_json(self, datastring):
try:
return jsonutils.loads(datastring)
except ValueError:
msg = _("Cannot understand JSON")
raise exception.MalformedResponseBody(reason=msg)
def default(self, datastring):
return {'body': self._from_json(datastring)}
class XMLDeserializer(TextDeserializer):
def __init__(self, metadata=None):
super(XMLDeserializer, self).__init__()
self.metadata = metadata or {}
xmlns = self.metadata.get('xmlns')
if not xmlns:
xmlns = constants.XML_NS_V20
self.xmlns = xmlns
def _get_key(self, tag):
tags = tag.split("}", 1)
if len(tags) == 2:
ns = tags[0][1:]
bare_tag = tags[1]
ext_ns = self.metadata.get(constants.EXT_NS, {})
if ns == self.xmlns:
return bare_tag
for prefix, _ns in ext_ns.items():
if ns == _ns:
return prefix + ":" + bare_tag
else:
return tag
def _get_links(self, root_tag, node):
link_nodes = node.findall(constants.ATOM_LINK_NOTATION)
root_tag = self._get_key(node.tag)
link_key = "%s_links" % root_tag
link_list = []
for link in link_nodes:
link_list.append({'rel': link.get('rel'),
'href': link.get('href')})
node.remove(link)
return link_list and {link_key: link_list} or {}
def _from_xml(self, datastring):
if datastring is None:
return None
plurals = set(self.metadata.get('plurals', {}))
try:
node = etree.fromstring(datastring)
root_tag = self._get_key(node.tag)
links = self._get_links(root_tag, node)
result = self._from_xml_node(node, plurals)
if root_tag == constants.VIRTUAL_ROOT_KEY:
return result
return dict({root_tag: result}, **links)
except Exception as e:
parseError = False
if (hasattr(etree, 'ParseError') and
isinstance(e, getattr(etree, 'ParseError'))):
parseError = True
elif isinstance(e, expat.ExpatError):
parseError = True
if parseError:
msg = _("Cannot understand XML")
raise exception.MalformedResponseBody(reason=msg)
else:
raise
def _from_xml_node(self, node, listnames):
attrNil = node.get(str(etree.QName(constants.XSI_NAMESPACE, "nil")))
attrType = node.get(str(etree.QName(
self.metadata.get('xmlns'), "type")))
if (attrNil and attrNil.lower() == 'true'):
return None
elif not len(node) and not node.text:
if (attrType and attrType == constants.TYPE_DICT):
return {}
elif (attrType and attrType == constants.TYPE_LIST):
return []
else:
return ''
elif (len(node) == 0 and node.text):
converters = {constants.TYPE_BOOL:
lambda x: x.lower() == 'true',
constants.TYPE_INT:
lambda x: int(x),
constants.TYPE_FLOAT:
lambda x: float(x)}
if attrType and attrType in converters:
return converters[attrType](node.text)
else:
return node.text
elif self._get_key(node.tag) in listnames:
return [self._from_xml_node(n, listnames) for n in node]
else:
result = dict()
for attr in node.keys():
if (attr == 'xmlns' or
attr.startswith('xmlns:') or
attr == constants.XSI_ATTR or
attr == constants.TYPE_ATTR):
continue
result[self._get_key(attr)] = node.get(attr)
children = list(node)
for child in children:
result[self._get_key(child.tag)] = self._from_xml_node(
child, listnames)
return result
def default(self, datastring):
return {'body': self._from_xml(datastring)}
def __call__(self, datastring):
return self.default(datastring)
class Serializer(object):
def __init__(self, metadata=None, default_xmlns=None):
self.metadata = metadata or {}
self.default_xmlns = default_xmlns
def _get_serialize_handler(self, content_type):
handlers = {
'application/json': JSONDictSerializer(),
'application/xml': XMLDictSerializer(self.metadata),
}
try:
return handlers[content_type]
except Exception:
raise exception.InvalidContentType(content_type=content_type)
def serialize(self, data, content_type):
return self._get_serialize_handler(content_type).serialize(data)
def deserialize(self, datastring, content_type):
return self.get_deserialize_handler(content_type).deserialize(
datastring)
def get_deserialize_handler(self, content_type):
handlers = {
'application/json': JSONDeserializer(),
'application/xml': XMLDeserializer(self.metadata),
}
try:
return handlers[content_type]
except Exception:
raise exception.InvalidContentType(content_type=content_type)
| nttcom/eclcli | eclcli/network/networkclient/common/serializer.py | Python | apache-2.0 | 11,379 |
"""Copyright (C) 2013 COLDWELL AG
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import json
import gevent
from .models import Account, Profile, HosterAccount, PremiumAccount, Http, HttpAccount, HttpHosterAccount, HttpPremiumAccount, \
MultiAccount, HttpMultiAccount
from .manager import manager, log, config
from ..hoster.this import localctx
from ..scheme import transaction
from .. import db, interface, settings
from ..api import proto
from . import verify
def init():
# plugins are loaded by hoster.py
Account.localctx = localctx
with transaction, db.Cursor() as c:
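        # Recreate account objects from persisted rows; the name and data
        # columns hold JSON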
aa = c.execute("SELECT * FROM account")
for a in aa.fetchall():
try:
name = json.loads(a['name'])
data = json.loads(a['data'])
data['id'] = int(a['id'])
a = manager.get_pool(name).add(**data)
except TypeError:
log.critical("broken row: {}".format(a))
c.execute("DELETE FROM account WHERE id={}".format(a["id"]))
except AttributeError:
log.critical("hoster account for {} not exists anymore".format(name))
@interface.register
class AccountInterface(interface.Interface):
name = "account"
def add(name=None, **kwargs):
"""adds a new account
generally the plugin name is needed: {name: 'plugin name'}
for default hoster plugins additional: {username: 'name', password: 'pass'}
for http profiles: {
host: 'hostname', port: port, username: 'user', password: 'pass',
auth_method: 'auth',
cookies: {key: value, key2: value2},
headers: {key: value, key2, value2}}
for ftp profiles: {
host: 'hostname', port: port, username: 'user', password: 'pass'}"""
account = manager.get_pool(name).add(**kwargs)
if account:
return account.id
def remove(id=None):
"""removes an account"""
with transaction:
try:
pool, account = manager.get_account_by_id(int(id))
except ValueError:
pass # account already deleted (not found)
else:
pool.remove(account)
def reset(id=None):
"""resets an account (logout and clear infos ...)"""
with transaction:
pool, account = manager.get_account_by_id(int(id))
account.reset()
def check(id=None):
"""rechecks an account (makes reset, than check)"""
with transaction:
pool, account = manager.get_account_by_id(int(id))
account.reset()
account.boot()
recheck = check
def modify(id=None, update=None):
"""modifies files. arguments are the same as on modify_package"""
pool, account = manager.get_account_by_id(int(id))
with transaction:
enabled = account.enabled
account.reset()
account.enabled = enabled
account.modify_table(update)
account.boot()
def sync(clients=None):
for name, pool in manager.iteritems():
for acc in pool:
if acc._private_account:
continue
data = acc.get_login_data()
if not data:
continue
data['name'] = name
data['enabled'] = acc.enabled
for client in clients:
if client == settings.app_uuid:
continue
proto.send('client', 'account.add', payload=data, channel=client)
def list_plugins():
"""lists all account plugins"""
return list(manager)
def set_secret(hoster=None, code=None, timeleft=None):
verify.set_secret(hoster, code, timeleft)
| MoroGasper/client | client/account/__init__.py | Python | gpl-3.0 | 4,393 |
# -*- coding: utf-8 -*-
"""Tools for visualizing ADCP data that is read and processed by the adcpy module
This module is imported under the main adcpy, and should be available as
adcpy.plot. Some methods can be used to visualize flat arrays, independent of
adcpy, and the plots may be created quickly using the IPanel and QPanel
classes.
This code is open source, and defined by the included MIT Copyright License
Designed for Python 2.7; NumPy 1.7; SciPy 0.11.0; Matplotlib 1.2.0
2014-09 - First Release; blsaenz, esatel
"""
import numpy as np
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
import scipy.stats.stats as sp
from matplotlib.dates import num2date#,date2num,
import adcpy
from adcpy_recipes import calc_transect_flows_from_uniform_velocity_grid
U_str = 'u'
V_str = 'v'
W_str = 'w'
vel_strs = (U_str,V_str,W_str)
# Common formatting for datenums:
def fmt_dnum(dn):
return num2date(dn).strftime('%c')
class IPanel(object):
"""
This object stores and plots a 2D velocity map as an image. Any of the data
fields (kwarg_options) may be specificed as kwargs during initialization.
At minimum IPanel requires 'velocity' to be set.
"""
kwarg_options = ['use_pcolormesh',
'minv',
'maxv',
'velocity',
'title',
'units',
'xlabel',
'ylabel',
'x',
'y',
'chop_off_nans',
'x_is_mtime',
'arrow_color',
'xy_is_lonlat',
'interpolation',
'shading',
'my_axes']
def __init__(self,**kwargs):
# init everything to None
for kwarg in self.kwarg_options:
exec("self.%s = None"%kwarg)
# set defaults
self.minv = -0.25
self.maxv = 0.25
self.x_is_mtime = False
self.interpolation = 'nearest'
self.use_pcolormesh = False
self.shading = 'flat'
self.xy_is_lonlat = False
self.chop_off_nans = False
# read/save arguments
for kwarg in self.kwarg_options:
if kwarg in kwargs:
exec("self.%s = kwargs[kwarg]"%kwarg)
def plot(self,ax=None):
"""
Plots the data in IPanel onto the axis ax, or if ax is None,
onto self.my_axes.
Inputs:
ax = matplotlib axes object, or None
Returns:
Nothing
"""
# set desired axes
if ax is not None:
plt.sca(ax)
elif self.my_axes is not None:
            plt.sca(self.my_axes)
            ax = self.my_axes
else:
ax = plt.gca()
if self.minv is not None:
mnv = ",vmin=self.minv"
else:
mnv = ""
        if self.maxv is not None:
mxv = ",vmax=self.maxv"
else:
mxv = ""
if self.use_pcolormesh:
vel_masked = np.ma.array(self.velocity,mask=np.isnan(self.velocity))
if self.x is not None and self.y is not None:
xy = "self.x,self.y,"
else:
xy = ""
plot_cmd = "pc=plt.pcolormesh(%svel_masked.T,shading=self.shading%s%s)"%(xy,mnv,mxv)
exec(plot_cmd)
else:
if self.x is not None and self.y is not None:
xy = ",extent=[self.x[0],self.x[-1],self.y[-1],self.y[0]]"
else:
xy = ""
plot_cmd = "pc=plt.imshow(self.velocity.T%s,interpolation=self.interpolation%s%s)"%(xy,mnv,mxv)
exec(plot_cmd)
if self.title is not None:
plt.title(self.title)
plt.axis('tight')
if self.chop_off_nans:
x_test = np.nansum(self.velocity,axis=1)
x_test = ~np.isnan(x_test)*np.arange(np.size(x_test))
if self.x is None:
plt.xlim([np.nanmin(x_test),np.nanmax(x_test)])
else:
plt.xlim([self.x[np.nanmin(x_test)],self.x[np.nanmax(x_test)]])
if self.x[-1] < self.x[0]:
plt.xlim(plt.xlim()[::-1])
y_test = np.nansum(self.velocity,axis=0)
y_test = ~np.isnan(y_test)*np.arange(np.size(y_test))
plt.ylim([np.nanmin(y_test),np.nanmax(y_test)])
if self.y is None:
plt.ylim([np.nanmin(y_test),np.nanmax(y_test)])
else:
plt.ylim([self.y[np.nanmin(y_test)],self.y[np.nanmax(y_test)]])
if self.y[-1] < self.y[0]:
plt.ylim(plt.ylim()[::-1])
if self.x_is_mtime:
ax.xaxis_date()
plt.gcf().autofmt_xdate()
elif self.xy_is_lonlat:
ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%7.4f'))
ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%7.4f'))
plt.ylabel('Latitude [degrees N]')
plt.xlabel('Longitude [degrees E]')
if self.xlabel is not None:
plt.xlabel(self.xlabel)
if self.ylabel is not None:
plt.ylabel(self.ylabel)
plt.colorbar(pc, use_gridspec=True)
class QPanel(object):
"""
This object stores and plots a 1D or 2D velocity map as a quiver plot. Any
    of the data fields (kwarg_options) may be specified as kwargs during
initialization. At minimum QPanel requires 'velocity' to be set.
"""
kwarg_options = ['u_vecs',
'v_vecs',
'velocity',
'title',
'units',
'xlabel',
'ylabel',
'x',
'y',
                     'v_scale', # make arrow bigger or smaller, relatively speaking, defaults to 1
'xpand', # fractional buffer around xy extent, to capture arrow ends
'x_is_mtime',
'arrow_color',
'xy_is_lonlat',
'equal_axes',
'my_axes']
def __init__(self,**kwargs):
# init everything to None
for kwarg in self.kwarg_options:
exec("self.%s = None"%kwarg)
# set defaults
self.u_vecs = 50
self.v_vecs = 50
self.x_is_mtime = False
self.xy_is_lonlat = False
self.arrow_color = 'k'
self.v_scale = 1.0
self.xpand = 0.33
self.equal_axes = False
# read/save arguments
for kwarg in self.kwarg_options:
if kwarg in kwargs:
exec("self.%s = kwargs[kwarg]"%kwarg)
def plot(self,ax=None):
"""
Plots the data in QPanel onto the axis ax, or if ax is None,
onto self.my_axes.
Inputs:
ax = matplotlib axes object, or None
Returns:
Nothing
"""
# set desired axes
if ax is not None:
plt.sca(ax)
elif self.my_axes is not None:
            plt.sca(self.my_axes)
            ax = self.my_axes
else:
ax = plt.gca()
dims = np.shape(self.velocity)
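        # Thin the vector field so roughly u_vecs x v_vecs arrows are drawn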
u_reduction = max(1,int(dims[0]/self.u_vecs))
u_indices = np.arange(0,dims[0],u_reduction)
v_mag = np.sqrt(self.velocity[...,0]**2 + self.velocity[...,1]**2)
if len(dims) == 2:
vScale = np.nanmax(v_mag[u_indices])
local_vel = self.velocity[u_indices,...]
local_u = local_vel[:,0]
local_v = local_vel[:,1]
local_x = self.x[u_indices]
local_y = self.y[u_indices]
elif len(dims) == 3:
v_reduction = max(1,int(dims[1]/self.v_vecs))
v_indices = np.arange(0,dims[1],v_reduction)
v_mag = v_mag[u_indices,:]
v_mag = v_mag[:,v_indices]
vScale = np.nanmax(np.nanmax(v_mag))
local_vel = self.velocity[u_indices,:,:]
local_vel = local_vel[:,v_indices,:]
local_u = local_vel[:,:,0].T
local_v = local_vel[:,:,1].T
local_x,local_y = np.meshgrid(self.x[u_indices],self.y[v_indices])
vScale = max(vScale,0.126)
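        # Round the quiver key legend value to the nearest 0.25 m/s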
qk_value = np.round(vScale*4)/4
Q = plt.quiver(local_x,local_y,
local_u,local_v,
width=0.0015*self.v_scale,
headlength=10.0,
headwidth=7.0,
scale = 10.0*vScale/self.v_scale, #scale = 0.005,
color = self.arrow_color,
scale_units = 'width')
if self.equal_axes:
ax.set_aspect('equal')
if self.xpand is not None:
xpand = self.xpand
xspan = np.max(self.x) - np.min(self.x)
yspan = np.max(self.y) - np.min(self.y)
xspan = max(xspan,yspan)
yspan = xspan
x1 = np.min(self.x) - xpand*xspan; x2 = np.max(self.x) + xpand*xspan
plt.xlim([x1, x2])
y1 = np.min(self.y) - xpand*yspan; y2 = np.max(self.y) + xpand*yspan
plt.ylim([y1, y2])
qk = plt.quiverkey(Q, 0.5, 0.08, qk_value,
r'%3.2f '%qk_value + r'$ \frac{m}{s}$', labelpos='W',)
if self.title is not None:
plt.title(self.title,y=1.06)
if self.x_is_mtime:
ax.xaxis_date()
plt.gcf().autofmt_xdate()
elif self.xy_is_lonlat:
ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%7.4f'))
ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%7.4f'))
plt.ylabel('Latitude [degrees N]')
plt.xlabel('Longitude [degrees E]')
if self.xlabel is not None:
plt.xlabel(self.xlabel)
if self.ylabel is not None:
plt.ylabel(self.ylabel)
#plt.autoscale(True)
def get_fig(fig):
"""
    Returns a new figure if fig is None, otherwise returns fig unchanged.
Inputs:
fig = matplotlib figure object, or None
Returns:
        fig = either the passed figure, or a new matplotlib figure object
"""
if fig is None:
return plt.figure()
else:
return fig
def plot_vertical_panels(vpanels,fig=None,title=None):
"""
    Plots a list of panels in a vertical arrangement in a figure window.
Inputs:
fig = matplotlib figure object in which to plot, or None for a new figure
Returns:
        fig_handle = matplotlib figure object containing the panels
"""
fig_handle = get_fig(fig)
plt.clf()
n_panels = len(vpanels)
sp_base = 100*n_panels+10
for i in range(n_panels):
plt.subplot(sp_base+i+1)
vpanels[i].plot()
plt.tight_layout()
if title is not None:
plt.title(title)
return fig_handle
def show():
"""
Shortcut to matplotlib.pyplot.show()
"""
plt.show()
def find_array_value_bounds(nparray,resolution):
"""
Find the bounds of the array values, adding up + resolution to make the bounds
a round out of a multiple of resolution.
Inputs:
nparray = array of numbers for which bounds are needed
        resolution = number to which the bounds will be rounded outward
Returns:
minv = minimum bound value of nparray
maxv = maximum bound value of nparray
"""
inv = 1.0/resolution
mtest = np.floor(nparray*inv)
minv = np.nanmin(np.nanmin(mtest))*resolution
mtest = np.ceil(nparray*inv)
maxv = np.nanmax(np.nanmax(mtest))*resolution
return (minv,maxv)
def find_plot_min_max_from_velocity(velocity_2d,res=None,equal_res_about_zero=True):
"""
    Finds bounds as in find_array_value_bounds(), then optionally
    makes them symmetric about zero. If res is None, returns None.
Inputs:
nparray = array of numbers for which bounds are needed [2D numpy array]
        res = number to which the bounds will be rounded outward [number]
equal_res_about_zero = toggle to switch [True/False]
Returns:
        minv = minimum bound value of nparray, or None
maxv = maximum bound value of nparray, or None
"""
if res is not None:
minv, maxv = find_array_value_bounds(velocity_2d,res)
if equal_res_about_zero:
            maxv = max(np.abs(minv), np.abs(maxv))
minv = -1.0*maxv
else:
minv = None
maxv = None
return (minv,maxv)
def get_basic_velocity_panel(velocity_2d,res=None,equal_res_about_zero=True):
"""
Returns an IPanel with from a 2D velocity array.
Inputs:
nparray = array of numbers for which bounds are needed
        res = number to which the bounds will be rounded outward
equal_res_about_zero = toggle to switch [True/False]
Returns:
IPanel onject
"""
minv, maxv = find_plot_min_max_from_velocity(velocity_2d,res,
equal_res_about_zero)
return IPanel(velocity = velocity_2d,
x = None,
y = None,
minv = minv,
maxv = maxv,
units = 'm/s')
def plot_uvw_velocity_array(velocity,fig=None,title=None,ures=None,vres=None,wres=None,
equal_res_about_zero=True):
"""
Generates a figure with three panels showing U,V,W velocity from a single 3D
velocity array
Inputs:
velocity = [x,y,3] shape numpy array of 2D velocities
fig = input figure number [integer or None]
ures,vres,wres = numbers by which the velocity bounds will be rounded up toward [number or None]
equal_res_about_zero = toggle to switch [True/False]
Returns:
fig = matplotlib figure object
"""
panels = []
res = [ures, vres, wres]
for i in range(3):
if i == 0 and title is not None:
title_str = title + " - "
else:
title_str = ""
panels.append(get_basic_velocity_panel(velocity[:,:,i],res=res[i],equal_res_about_zero=False))
panels[-1].title = "%s%s Velocity [m/s]"%(title_str,vel_strs[i])
panels[-1].use_pcolormesh = False
fig = plot_vertical_panels(panels)
plt.tight_layout()
return fig
def plot_secondary_circulation(adcp,u_vecs,v_vecs,fig=None,title=None):
"""
    Generates a figure with a single panel, plotting U velocity as an IPanel, overlain by
VW vectors from a QPanel.
Inputs:
adcp = ADCPData object
u_vecs,v_vecs = desired number of horizontal/vertical vectors [integers]
fig = input figure number [integer or None]
title = figure title text [string or None]
Returns:
fig = matplotlib figure object
"""
if fig is None:
fig = plt.figure(fig,figsize=(10,4))
else:
plt.clf()
xd,yd,dd,xy_line = adcpy.util.find_projection_distances(adcp.xy)
stream_wise = get_basic_velocity_panel(adcp.velocity[:,:,1],res=0.01)
stream_wise.x = dd
stream_wise.y = adcp.bin_center_elevation
stream_wise.chop_off_nans = True
secondary = QPanel(velocity = adcp.velocity[:,:,1:],
x = dd,
y = adcp.bin_center_elevation,
xpand = None,
v_scale = 1.5,
u_vecs = u_vecs,
v_vecs = v_vecs,
arrow_color = 'k',
units = 'm/s')
stream_wise.plot()
secondary.plot()
if title is not None:
plt.title(title)
return fig
def plot_secondary_circulation_over_streamwise(adcp,u_vecs,v_vecs,fig=None,title=None):
"""
    Generates a figure with a single panel, plotting U velocity as an IPanel, overlain by
VW vectors from a QPanel.
Inputs:
adcp = ADCPData object
u_vecs,v_vecs = desired number of horizontal/vertical vectors [integers]
fig = input figure number [integer or None]
title = figure title text [string or None]
Returns:
fig = matplotlib figure object
"""
if fig is None:
fig = plt.figure(fig,figsize=(10,4))
else:
plt.clf()
xd,yd,dd,xy_line = adcpy.util.find_projection_distances(adcp.xy)
stream_wise = get_basic_velocity_panel(adcp.velocity[:,:,0],res=0.01)
stream_wise.x = dd
stream_wise.y = adcp.bin_center_elevation
stream_wise.chop_off_nans = True
secondary = QPanel(velocity = adcp.velocity[:,:,1:],
x = dd,
y = adcp.bin_center_elevation,
xpand = None,
v_scale = 1.5,
u_vecs = u_vecs,
v_vecs = v_vecs,
arrow_color = 'k',
units = 'm/s')
stream_wise.plot()
secondary.plot()
if title is not None:
plt.title(title)
return fig
def plot_ensemble_mean_vectors(adcp,fig=None,title=None,n_vectors=50,return_panel=False):
"""
Generates a QPanel, plotting mean uv velocity vectors in the x-y plane.
Inputs:
adcp = ADCPData object
fig = input figure number [integer or None]
title = figure title text [string or None]
n_vectors = desired number of vectors [integer]
        return_panel = optionally return the QPanel instead of the figure
Returns:
fig = matplotlib figure object, or
vectors = QPanel object
"""
dude = np.zeros((adcp.n_ensembles,2),np.float64)
velocity = adcp.get_unrotated_velocity()
    # this doesn't factor in depth, and may integrate bad values if they have not been filtered into NaNs somehow
dude[:,0] = sp.nanmean(velocity[:,:,0],axis=1)
dude[:,1] = sp.nanmean(velocity[:,:,1],axis=1)
vectors = QPanel(velocity = dude,
u_vecs = n_vectors,
arrow_color = 'k',
title = title,
units = 'm/s')
if adcp.xy is not None:
vectors.x = adcp.xy[:,0]
vectors.y = adcp.xy[:,1]
vectors.xlabel = 'm'
vectors.ylabel = 'm'
vectors.equal_axes = True
elif adcp.lonlat is not None:
vectors.x = adcp.lonlat[:,0]
vectors.y = adcp.lonlat[:,1]
vectors.xy_is_lonlat = True
else:
vectors.x = adcp.mtime
vectors.y = np.zeros(np.size(vectors.x))
vectors.x_is_mtime = True
if return_panel:
return vectors
else:
fig = get_fig(fig)
vectors.plot()
plt.tight_layout()
return fig
def plot_obs_group_xy_lines(adcp_obs,fig=None,title=None):
"""
Produces a quick plot of the adcp ensemble x-y locations, from
    a list of ADCPData objects. x-y track lines are colored differently
for each ADCPData object.
Inputs:
adcp_obs = list ADCPData objects
fig = input figure number [integer or None]
title = figure title text [string or None]
Returns:
fig = matplotlib figure object
"""
fig = get_fig(fig)
plt.hold(True)
legends = []
for a in adcp_obs:
if a.mtime is not None:
label = a.source+"; "+fmt_dnum(a.mtime[0])
else:
label = a.source
plot_xy_line(a,fig,label=label,use_stars_at_xy_locations=False)
plt.legend(prop={'size':10})
if title is not None:
plt.title(title,y=1.06)
return fig
def plot_xy_line(adcp,fig=None,title=None,label=None,use_stars_at_xy_locations=True):
"""
Produces a quick plot of the adcp ensemble x-y locations, from an ADCPData
object.
Inputs:
        adcp = ADCPData object
fig = input figure number [integer or None]
title = figure title text [string or None]
use_stars_at_xy_locations = plots * at actual ensemble locations [True/False]
Returns:
fig = matplotlib figure object
"""
fig = get_fig(fig)
if adcp.xy is not None:
x = adcp.xy[:,0]
y = adcp.xy[:,1]
elif adcp.lonlat is not None:
x = adcp.lonlat[:,0]
y = adcp.lonlat[:,1]
else:
raise Exception,"plot_xy_line(): no position data in ADCPData object"
if use_stars_at_xy_locations:
plt.plot(x,y,marker='*',label=label)
else:
plt.plot(x,y,label=label)
if title is not None:
plt.title(title,y=1.06)
formatter = matplotlib.ticker.ScalarFormatter(useOffset=False)
plt.gca().yaxis.set_major_formatter(formatter)
plt.gca().xaxis.set_major_formatter(formatter)
return fig
def plot_uvw_velocity(adcp,uvw='uvw',fig=None,title=None,ures=None,vres=None,wres=None,
equal_res_about_zero=True,return_panels=False):
"""
    Produces vertical panels of U, V, and/or W velocity from an ADCPData object,
    plotted against along-track distance or time where available.
    Inputs:
        adcp = ADCPData object
        uvw = string selecting which of 'u','v','w' velocities to plot
        fig = input figure number [integer or None]
        title = figure title text [string or None]
        ures,vres,wres = numbers by which the velocity bounds will be rounded up toward [number or None]
        equal_res_about_zero = toggle to switch [True/False]
        return_panels = optionally return the list of panels instead of the figure
    Returns:
        fig = matplotlib figure object, or panels = list of IPanel objects
"""
panels = []
dx = None
dt = None
res = [ures, vres, wres]
if adcp.xy is not None:
if np.size(adcp.xy[:,0]) == adcp.n_ensembles:
xd,yd,dx,xy_line = adcpy.util.find_projection_distances(adcp.xy)
if adcp.mtime is not None:
if np.size(adcp.mtime) == adcp.n_ensembles:
dt = adcp.mtime
ax = adcpy.util.get_axis_num_from_str(uvw)
for i in ax:
if i == ax[0] and title is not None:
title_str = title + " - "
else:
title_str = ""
panels.append(get_basic_velocity_panel(adcp.velocity[:,:,i],res=res[i]))
panels[-1].title = "%s%s Velocity [m/s]"%(title_str,vel_strs[i])
if dx is not None:
# plotting velocity projected along a line
panels[-1].x = dx
panels[-1].xlabel = 'm'
panels[-1].ylabel = 'm'
panels[-1].y = adcp.bin_center_elevation
elif dt is not None:
# plotting velocity ensembles vs time
panels[-1].x = dt
panels[-1].x_is_mtime = True
panels[-1].y = adcp.bin_center_elevation
panels[-1].ylabel = 'm'
panels[-1].use_pcolormesh = False
else:
# super basic plot
panels[-1].use_pcolormesh = False
if return_panels:
return panels
else:
fig = plot_vertical_panels(panels)
return fig
def plot_flow_summmary(adcp,title=None,fig=None,ures=None,vres=None,use_grid_flows=False):
"""
Plots projected mean flow vectors, U and V velocity profiles, and
associated text data on a single plot.
Inputs:
        adcp = ADCPData object
fig = input figure number [integer or None]
title = figure title text [string or None]
ures,vres = numbers by which the velocity bounds will be rounded up toward [number or None]
        use_grid_flows = calculates flows by weighted summing of grid cells
          [True], or from the cross-product flow when available [False]
Returns:
fig = matplotlib figure object
"""
    if adcp.xy is None:
        raise ValueError('Cannot plot summary without projected data.')
if fig is None:
fig = plt.figure(fig,figsize=(8,10.5))
else:
plt.clf()
vectors = plot_ensemble_mean_vectors(adcp,n_vectors=30,return_panel=True)
vectors.x = vectors.x - np.min(vectors.x)
vectors.y = vectors.y - np.min(vectors.y)
u_panel,v_panel = plot_uvw_velocity(adcp,uvw='uv',fig=fig,ures=ures,
vres=vres,return_panels=True)
u_panel.chop_off_nans = True
u_panel.xlabel = None
v_panel.chop_off_nans = True
xd,yd,dd,xy_line = adcpy.util.find_projection_distances(adcp.xy)
plt.subplot(221)
vectors.plot()
plt.subplot(413)
u_panel.plot()
plt.subplot(414)
v_panel.plot()
plt.tight_layout()
if title is not None:
plt.text(0.55,0.933,title,
horizontalalignment='left',
verticalalignment='center',
fontsize=10,
transform = fig.transFigure)
if adcp.mtime is not None:
plt.text(0.55,0.9,'Start of Data: %s'%( num2date(adcp.mtime[0]).strftime('%c')),
horizontalalignment='left',
verticalalignment='center',
fontsize=10,
transform = fig.transFigure)
if adcp.rotation_angle is not None:
if np.size(adcp.rotation_angle) > 1:
rot_str = 'Rozovski'
else:
rot_str = '%5.2f degrees'%(adcp.rotation_angle*180.0/np.pi)
else:
rot_str = 'None'
    plt.text(0.55,0.866,'Streamwise Rotation: %s'%rot_str,
horizontalalignment='left',
verticalalignment='center',
fontsize=10,
transform=fig.transFigure)
x1 = min(adcp.xy[:,0][np.nonzero(~np.isnan(adcp.xy[:,0]))])
y1 = min(adcp.xy[:,1][np.nonzero(~np.isnan(adcp.xy[:,1]))])
loc_string = 'Plot origin (%s) = (%i,%i)'%(adcp.xy_srs,
int(x1),
int(y1))
plt.text(0.55,0.833,loc_string,
horizontalalignment='left',
verticalalignment='center',
fontsize=10,
transform = fig.transFigure)
if not use_grid_flows and 'calc_crossproduct_flow' in dir(adcp):
wrums,wru,tsa,tcsa = adcp.calc_crossproduct_flow()
plt.text(0.55,0.8,'Mean cross-product velocity [m/s]: %3.2f'%wrums,
horizontalalignment='left',
verticalalignment='center',
fontsize=10,
transform = fig.transFigure)
plt.text(0.55,0.766,'Mean cross-product flow [m^3/s]: %12.2f'%wru,
horizontalalignment='left',
verticalalignment='center',
fontsize=10,
transform = fig.transFigure)
else:
(scalar_mean_vel, depth_averaged_vel, total_flow, total_survey_area) = \
calc_transect_flows_from_uniform_velocity_grid(adcp,use_grid_only=True)
plt.text(0.55,0.8,'Mean U velocity [m/s]: %3.2f'%scalar_mean_vel[0],
horizontalalignment='left',
verticalalignment='center',
fontsize=10,
transform = fig.transFigure)
plt.text(0.55,0.766,'Mean V velocity [m/s]: %3.2f'%scalar_mean_vel[1],
horizontalalignment='left',
verticalalignment='center',
fontsize=10,
transform = fig.transFigure)
plt.text(0.55,0.733,'Mean U flow [m^3/s]: %12.2f'%total_flow[0],
horizontalalignment='left',
verticalalignment='center',
fontsize=10,
transform = fig.transFigure)
plt.text(0.55,0.7,'Mean V flow [m^3/s]: %12.2f'%total_flow[1],
horizontalalignment='left',
verticalalignment='center',
fontsize=10,
transform = fig.transFigure)
if adcp.source is not None:
plt.text(0.55,0.633,'Sources:\n%s'%adcp.source,
horizontalalignment='left',
verticalalignment='center',
fontsize=10,
transform = fig.transFigure)
return fig
| esatel/ADCPy | adcpy/adcpy_plot.py | Python | mit | 27,568 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 ChriCar Beteiligungs- und Beratungs- GmbH (<http://www.camptocamp.at>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import openerp.netsvc
import logging
class account_invoice(osv.osv):
_inherit = "account.invoice"
def button_validate(self, cr , uid, ids, context=None):
"""FIXME
workaround because of limited multi company support
"""
_logger = logging.getLogger(__name__)
if not context:
context = {}
for invoice in self.browse(cr, uid, ids, context):
_logger.debug('FGF validate partner %s ' %(invoice.partner_id.id) )
if invoice.partner_id.company_id and invoice.partner_id.company_id.id != invoice.company_id.id:
_logger.debug('FGF update partner %s ' %(invoice.partner_id.id) )
self.pool.get('res.partner').write(cr, 1, [invoice.partner_id.id], {'company_id':''})
        res = super(account_invoice, self).button_validate(cr, uid, ids, context)
return res
account_invoice()
| VitalPet/c2c-rd-addons | sale_order_2_purchase/invoice.py | Python | agpl-3.0 | 1,994 |
#!/usr/bin/python
"""Simple server that listens on port 6000 and echos back every input to the client.
Connect to it with:
telnet localhost 6000
Terminate the connection by terminating telnet (typically Ctrl-] and then 'quit').
"""
import gevent
from gevent.server import StreamServer
def discard(socket, address):
print ('New connection from %s:%s' % address)
fileobj = socket.makefile()
while True:
line = fileobj.readline()
if not line: return
print ("got %r" % line)
if __name__ == '__main__':
# to make the server use SSL, pass certfile and keyfile arguments to the constructor
s1 = StreamServer(('localhost', 59068), discard)
s1.start()
s2 = StreamServer(('localhost', 59067), lambda a,b:None)
s2.start()
gevent.sleep(30)
s1.stop()
s2.stop()
| smurfix/HomEvenT | test/scripts/wago_job.py | Python | gpl-3.0 | 772 |
from bin_heap import binHeap
import pytest
import random
def test_empty_heap():
blist = binHeap()
assert blist.heap == [0]
def test_push_pop():
blist = binHeap()
blist.push(123)
assert blist.pop() == 123
def all_list(heap):
value_input = []
while True:
try:
value_input.append(heap.pop())
except IndexError:
return value_input
return value_input
def test_input_random():
blist = binHeap()
for x in random.sample(range(123), 123):
blist.push(x)
assert all_list(blist) == range(0, 123) | alibulota/data-structures | test_bin_heap.py | Python | mit | 582 |
"""CLI tests for Repository setup.
:Requirement: Repository
:CaseAutomation: Automated
:CaseLevel: Component
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import pytest
from robottelo.cli.factory import (
make_lifecycle_environment,
make_org,
)
from robottelo.constants import (
CUSTOM_PUPPET_REPO,
DISTROS_SUPPORTED,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_0_CUSTOM_PACKAGE,
FAKE_0_YUM_REPO,
)
from robottelo.datafactory import xdist_adapter
from robottelo.decorators import tier4
from robottelo.products import (
DockerRepository,
PuppetRepository,
YumRepository,
RepositoryCollection,
SatelliteToolsRepository,
)
from robottelo.vm import VirtualMachine
def _distro_cdn_variants():
distro_cdn = []
for cdn in [False, True]:
for distro in DISTROS_SUPPORTED:
distro_cdn.append((distro, cdn))
return distro_cdn
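# Illustrative result, assuming DISTROS_SUPPORTED contains rhel6 and rhel7: the
# returned list is [(rhel6, False), (rhel7, False), (rhel6, True), (rhel7, True)],
# i.e. one parametrized case per (distro, cdn) pair.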
@pytest.fixture(scope='module')
def module_org():
return make_org()
@pytest.fixture(scope='module')
def module_lce(module_org):
return make_lifecycle_environment({'organization-id': module_org['id']})
@tier4
@pytest.mark.parametrize('value', **xdist_adapter(_distro_cdn_variants()))
def test_vm_install_package(value, module_org, module_lce):
"""Install a package with all supported distros and cdn not cdn variants
:id: b2a6065a-69f6-4805-a28b-eaaa812e0f4b
    :expectedresults: Package is installed
"""
    # the value is a supported distro: DISTRO_RH6 or DISTRO_RH7
    # this will create 4 tests:
    #    - one test with distro rhel6 cdn False
    #    - one test with distro rhel7 cdn False
    #    - one test with distro rhel6 cdn True
    #    - one test with distro rhel7 cdn True
distro, cdn = value
repos_collection = RepositoryCollection(
distro=distro,
repositories=[
SatelliteToolsRepository(cdn=cdn),
YumRepository(url=FAKE_0_YUM_REPO),
DockerRepository(url=DOCKER_REGISTRY_HUB, upstream_name=DOCKER_UPSTREAM_NAME),
PuppetRepository(url=CUSTOM_PUPPET_REPO,
modules=[dict(name='generic_1', author='robottelo')])
]
)
# this will create repositories , content view and activation key
repos_collection.setup_content(module_org['id'], module_lce['id'], upload_manifest=True)
with VirtualMachine(distro=distro) as vm:
# this will install katello ca, register vm host, enable rh repos,
# install katello-agent
repos_collection.setup_virtual_machine(vm, enable_custom_repos=True)
# install a package
result = vm.run('yum -y install {0}'.format(FAKE_0_CUSTOM_PACKAGE))
assert result.return_code == 0
| ldjebran/robottelo | tests/foreman/cli/test_vm_install_products_package.py | Python | gpl-3.0 | 2,762 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import codecs
from setuptools import setup
def read(fname):
file_path = os.path.join(os.path.dirname(__file__), fname)
return codecs.open(file_path, encoding='utf-8').read()
setup(
name='pytest-yapf',
version='0.1.1',
author='Roman Osipenko',
author_email='roman.osipenko@djangostars.com',
maintainer='Roman Osipenko',
maintainer_email='roman.osipenko@djangostars.com',
license='MIT',
url='https://github.com/django-stars/pytest-yapf',
description='Run yapf',
long_description=read('README.rst'),
py_modules=['pytest_yapf'],
install_requires=['pytest>=3.1.1', 'yapf>=0.16.2'],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Pytest',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
entry_points={
'pytest11': [
'yapf = pytest_yapf',
],
},
)
| django-stars/pytest-yapf | setup.py | Python | mit | 1,383 |
# Copyright 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Continuous Integration Database Library."""
import glob
import logging
import os
import re
import sqlalchemy
from chromite.cbuildbot import constants
TEST_DB_CREDENTIALS_DIR = os.path.join(constants.SOURCE_ROOT,
'crostools', 'cidb',
'test_credentials')
CIDB_MIGRATIONS_DIR = os.path.join(constants.CHROMITE_DIR, 'cidb',
'migrations')
class DBException(Exception):
"""General exception class for this module."""
class UnsupportedMethodException(DBException):
"""Raised when a call is made that the database does not support."""
def minimum_schema(min_version):
"""Generate a decorator to specify a minimum schema version for a method.
This decorator should be applied only to instance methods of
SchemaVersionedMySQLConnection objects.
"""
def decorator(f):
def wrapper(self, *args):
if self.schema_version < min_version:
raise UnsupportedMethodException()
return f(self, *args)
return wrapper
return decorator
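# Illustrative usage, mirroring CIDBConnection below: a method decorated with
# @minimum_schema(N) raises UnsupportedMethodException when called on a
# connection whose schema_version is below N.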
class SchemaVersionedMySQLConnection(object):
"""Connection to a database that is aware of its schema version."""
SCHEMA_VERSION_TABLE_NAME = 'schemaVersionTable'
SCHEMA_VERSION_COL = 'schemaVersion'
def __init__(self, db_name, db_migrations_dir, db_credentials_dir):
"""SchemaVersionedMySQLConnection constructor.
Args:
db_name: Name of the database to connect to.
db_migrations_dir: Absolute path to directory of migration scripts
for this database.
db_credentials_dir: Absolute path to directory containing connection
information to the database. Specifically, this
                          directory should contain files named user.txt,
password.txt, host.txt, client-cert.pem,
client-key.pem, and server-ca.pem
"""
self.db_migrations_dir = db_migrations_dir
self.db_credentials_dir = db_credentials_dir
self.db_name = db_name
with open(os.path.join(db_credentials_dir, 'password.txt')) as f:
password = f.read().strip()
with open(os.path.join(db_credentials_dir, 'host.txt')) as f:
host = f.read().strip()
with open(os.path.join(db_credentials_dir, 'user.txt')) as f:
user = f.read().strip()
cert = os.path.join(db_credentials_dir, 'client-cert.pem')
key = os.path.join(db_credentials_dir, 'client-key.pem')
ca = os.path.join(db_credentials_dir, 'server-ca.pem')
ssl_args = {'ssl': {'cert': cert, 'key': key, 'ca': ca}}
connect_string = 'mysql://%s:%s@%s' % (user, password, host)
# Create a temporary engine to connect to the mysql instance, and check if
# a database named |db_name| exists. If not, create one.
temp_engine = sqlalchemy.create_engine(connect_string,
connect_args=ssl_args)
databases = temp_engine.execute('SHOW DATABASES').fetchall()
if (db_name,) not in databases:
temp_engine.execute('CREATE DATABASE %s' % db_name)
logging.info('Created database %s', db_name)
temp_engine.dispose()
# Now create the persistent connection to the database named |db_name|.
# If there is a schema version table, read the current schema version
# from it. Otherwise, assume schema_version 0.
connect_string = '%s/%s' % (connect_string, db_name)
self.engine = sqlalchemy.create_engine(connect_string,
connect_args=ssl_args)
self.schema_version = self.QuerySchemaVersion()
def DropDatabase(self):
"""Delete all data and tables from database, and drop database.
Use with caution. All data in database will be deleted. Invalidates
this database connection instance.
"""
self.engine.execute('DROP DATABASE %s' % self.db_name)
self.engine.dispose()
def QuerySchemaVersion(self):
"""Query the database for its current schema version number.
Returns:
The current schema version from the database's schema version table,
as an integer, or 0 if the table is empty or nonexistent.
"""
tables = self.engine.execute('SHOW TABLES').fetchall()
if (self.SCHEMA_VERSION_TABLE_NAME,) in tables:
r = self.engine.execute('SELECT MAX(%s) from %s' %
(self.SCHEMA_VERSION_COL, self.SCHEMA_VERSION_TABLE_NAME))
return r.fetchone()[0] or 0
else:
return 0
def ApplySchemaMigrations(self, maxVersion=None):
"""Apply pending migration scripts to database, in order.
Args:
maxVersion: The highest version migration script to apply. If
unspecified, all migrations found will be applied.
"""
# Look for migration script files in the migration script directory,
# with names of the form [number]*.sql, and sort these by number.
migration_scripts = glob.glob(os.path.join(self.db_migrations_dir, '*.sql'))
migrations = []
for script in migration_scripts:
match = re.match(r'([0-9]*).*', os.path.basename(script))
if match:
migrations.append((int(match.group(1)), script))
migrations.sort()
# Execute the migration scripts in order, asserting that each one
# updates the schema version to the expected number. If maxVersion
# is specified stop early.
for (number, script) in migrations:
if maxVersion is not None and number > maxVersion:
break
if number > self.schema_version:
self.RunQueryScript(script)
self.schema_version = self.QuerySchemaVersion()
if self.schema_version != number:
          raise DBException('Migration script %s did not update '
                            'schema version to %s as expected. ' % (script,
                                                                    number))
def RunQueryScript(self, script_path):
"""Run a .sql script file located at |script_path| on the database."""
with open(script_path, 'r') as f:
script = f.read()
queries = [q.strip() for q in script.split(';') if q.strip()]
for q in queries:
self.engine.execute(q)
class CIDBConnection(SchemaVersionedMySQLConnection):
"""Connection to a Continuous Integration database."""
def __init__(self):
super(CIDBConnection, self).__init__('cidb', CIDB_MIGRATIONS_DIR,
TEST_DB_CREDENTIALS_DIR)
@minimum_schema(1)
def TestMethodSchemaTooLow(self):
"""This method is a temporary one to test the minimum_schema decorator."""
@minimum_schema(0)
def TestMethodSchemaOK(self):
"""This method is a temporary one to test the minimum_schema decorator."""
| chadversary/chromiumos.chromite | lib/cidb.py | Python | bsd-3-clause | 6,868 |
# Copyright 2018, 2020, 2021 Smithsonian Astrophysical Observatory
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy as np
import pytest
from pytest import approx
from sherpa.astro.data import DataIMG, DataIMGInt
from sherpa.astro.ui.utils import Session
from sherpa.data import Data1DInt, Data1D
from sherpa.models.basic import Box1D
from sherpa.models import Const1D, RegriddableModel1D, Parameter, Const2D, \
RegriddableModel2D, ArithmeticModel, Gauss2D, basic, model
from sherpa.utils.err import ModelErr
from sherpa.utils import neville, linear_interp
from sherpa.utils import akima
@pytest.fixture
def setup():
const = Const1D("const")
const.c0 = 0
const.c0.freeze()
my_model = MyModel("my_model")
my_model.integrate = False
return Session(), my_model, const
@pytest.fixture
def setup2d():
const = Const2D("const")
const.c0 = 0
const.c0.freeze()
x = [2, 3, 2, 3]
y = [2, 2, 3, 3]
xhi = [2.1, 3.5, 2.1, 3.5]
yhi = [2.1, 2.1, 3, 3.5]
# This is the result when rebinning [100, ] * 4
z = [225, ] * 4
my_model = MyModel2D("my_model")
return Session(), my_model, const, (x, y, xhi, yhi, z)
def test_evaluate_model_on_arbitrary_grid_point_list(setup):
"""
The idea of the test is that the model will evaluate differently depending on the grid it is evaluated on.
This is really arbitrary, it just exercises the high level API for a common workflow while making sure the results
are the expected ones.
"""
ui, my_model, const = setup
# Load data
ui.load_arrays(1, [1, 2, 3], [100, 100, 100])
# Get a model that evaluates on a different grid
# This is the important part.
regrid_model = my_model.regrid([1, 2, 2.5, 4, 5])
# The model will usually be part of a complex model expression, so let's pretend we add another component,
# although that component is muted.
ui.set_source(regrid_model + const)
# Fit and check the result
assert_fit(ui, my_model, 1)
# Now fit with a different grid.
# This is also the important part.
regrid_model.grid = [1, 2, 3, 4, 5]
assert_fit(ui, my_model, 0)
def test_evaluate_model_on_arbitrary_grid_point_list_2d(setup2d):
"""
The idea of the test is that the model will evaluate differently depending on the grid it is evaluated on.
This is really arbitrary, it just exercises the high level API for a common workflow while making sure the results
are the expected ones.
"""
ui, my_model, const, data = setup2d
x, y, _, _, z = data
# Load data
ui.load_arrays(1, x, y, z, DataIMG)
# Get a model that evaluates on a different grid
# This is the important part.
regrid_model = my_model.regrid([2, 2.5, 3], [2, 2.5, 3])
# The model will usually be part of a complex model expression, so let's pretend we add another component,
# although that component is muted.
ui.set_source(regrid_model + const)
# Fit and check the result
assert_fit(ui, my_model, (1, 1))
# Now fit with a different grid.
# This is also the important part.
regrid_model.grid = [2, 3], [2, 3]
assert_fit(ui, my_model, (0, 0))
def test_evaluate_model_on_arbitrary_grid_integrated_list(setup):
"""
Same as above, but with integrated models.
"""
ui, my_model, const = setup
# Load data
ui.load_arrays(1, [1.5, 2.5, 3.5], [2.5, 3.5, 4.5], [100, 100, 100], Data1DInt)
# Get a model that evaluates on a different grid
# This is the important part.
regrid_model = my_model.regrid([0, 1, 2], [1, 2, 3])
# The model will be part of a complex model expression, so let's pretend we add another component
ui.set_source(regrid_model + const)
# Fit and check the result
assert_fit(ui, my_model, 1)
# Now fit with a different grid.
# This is also the important part.
regrid_model.grid = [1.5, 2.5, 3.5], [2.5, 3.5, 4.5]
assert_fit(ui, my_model, 0)
def test_evaluate_model_on_arbitrary_grid_integrated_list_2d(setup2d):
"""
Same as above, but with integrated models
"""
ui, my_model, const, data = setup2d
x, y, xhi, yhi, z = data
# Load data
ui.load_arrays(1, x, y, xhi, yhi, z, DataIMGInt)
regrid_lo = [2, 2.5, 3]
regrid_hi = np.array([2, 2.5, 3.5])
# Get a model that evaluates on a different grid
# This is the important part.
regrid_model = my_model.regrid(regrid_lo, regrid_lo, regrid_hi, regrid_hi)
# The model will usually be part of a complex model expression, so let's pretend we add another component,
# although that component is muted.
ui.set_source(regrid_model + const)
# Fit and check the result
assert_fit(ui, my_model, (1, 1))
# Now fit with a different grid.
# This is also the important part.
regrid_model.grid = x, y, xhi, yhi
assert_fit(ui, my_model, (0, 0))
def test_evaluate_model_on_arbitrary_grid_point_ndarray(setup):
"""
The idea of the test is that the model will evaluate differently depending on the grid it is evaluated on.
This is really arbitrary, it just exercises the high level API for a common workflow while making sure the results
are the expected ones.
"""
ui, my_model, const = setup
# Load data
ui.load_arrays(1, [1, 2, 3], [100, 100, 100])
# Get a model that evaluates on a different grid
# This is the important part.
regrid_model = my_model.regrid(np.array([1, 2, 2.5, 4, 5]))
# The model will be part of a complex model expression, so let's pretend we add another component
ui.set_source(regrid_model + const)
# Fit and check the result
assert_fit(ui, my_model, 1)
    # Now fit with a different grid.
# This is also the important part.
regrid_model.grid = np.array([1, 2, 3, 4, 5])
assert_fit(ui, my_model, 0)
def test_evaluate_model_on_arbitrary_grid_integrated_ndarray(setup):
"""
Same as above, but with integrated models.
"""
ui, my_model, const = setup
# Load data
ui.load_arrays(1, [1.5, 2.5, 3.5], [2.5, 3.5, 4.5], [100, 100, 100], Data1DInt)
# Get a model that evaluates on a different grid
# This is the important part.
regrid_model = my_model.regrid(np.array([0, 1, 2]), [1, 2, 3])
# The model will be part of a complex model expression, so let's pretend we add another component
ui.set_source(regrid_model + const)
# Fit and check the result
assert_fit(ui, my_model, 1)
# Now fit with a different grid.
# This is also the important part.
regrid_model.grid = [1.5, 2.5, 3.5], np.array([2.5, 3.5, 4.5])
assert_fit(ui, my_model, 0)
def test_evaluate_model_on_arbitrary_grid_no_overlap(setup):
"""
    A non-integrated model cannot be regridded onto an integrated (lo, hi) grid; a ModelErr is raised
"""
ui, my_model, _ = setup
# Get a model that evaluates on a different grid
# This is the important part. Note that there is overlap, but
    # the start and end points are different.
with pytest.raises(ModelErr) as excinfo:
my_model.regrid([2, 2.5], [2, 2.5])
assert ModelErr.dict['needsint'] in str(excinfo.value)
def test_evaluate_model_on_arbitrary_grid_no_overlap_2d(setup2d):
"""
In the 2D case, the overlap is way more stringent than in the 1D case, due to the complexity of rebinning
"""
ui, my_model, _, data = setup2d
x, y, _, _, _ = data
my_model.x_has_25 = 1 # To force the model to evaluate to something other than 0.
# Get a model that evaluates on a different grid
# This is the important part. Note that there is overlap, but
# the start and end points are different.
regrid_model = my_model.regrid([2, 2.5], [2, 2.5])
with pytest.warns(UserWarning):
np.testing.assert_array_equal(regrid_model(x, y), [0, 0, 0, 0])
def test_runtime_interp():
def tst_runtime_interp(model, requested, interp):
regrid_model = mdl.regrid(requested, interp=interp)
yregrid = regrid_model(xgrid)
return yregrid
xgrid = np.arange(2, 6, 0.1)
requested = np.arange(2.5, 5.1, 0.075)
mdl = Box1D()
mdl.xlow = 3.1
mdl.xhi = 4.2
mdl.ampl = 0.4
yregrid = tst_runtime_interp(mdl, requested, akima.akima)
assert 4.4 == approx(yregrid.sum())
yregrid = tst_runtime_interp(mdl, requested, linear_interp)
assert 4.4 == approx(yregrid.sum())
yregrid = tst_runtime_interp(mdl, requested, neville)
assert - 5.0e6 > yregrid.sum()
d = Data1D('tst', xgrid, np.ones_like(xgrid))
yexpected = d.eval_model(mdl)
requested = np.arange(2.5, 7, 0.2)
rmdl = mdl.regrid(requested)
ygot = d.eval_model(rmdl)
assert ygot == approx(yexpected)
def test_regrid_binaryop_1d():
"""issue #762, Cannot regrid a composite model (BinaryOpModel)"""
from sherpa.stats import LeastSq
from sherpa.fit import Fit
from sherpa.optmethods import LevMar
class MyConst1D(RegriddableModel1D):
def __init__(self, name='myconst1d'):
self.c0 = Parameter(name, 'c0', 3.1)
self.counter = 0
ArithmeticModel.__init__(self, name, (self.c0,))
def calc(self, par, *args, **kwargs):
x = args[0]
self.counter += x.size
return par[0]
class MyGauss(RegriddableModel1D):
def __init__(self, name='mygauss'):
self.sigma = Parameter(name, 'sigma', 10, min=0, max=10)
self.pos = Parameter(name, 'pos', 0, min=-10, max=10)
self.ampl = Parameter(name, 'ampl', 5)
self.counter = 0
ArithmeticModel.__init__(self, name, (self.sigma, self.pos, self.ampl))
def calc(self, par, *args, **kwargs):
sigma, pos, ampl = par[0], par[1], par[2]
x = args[0]
self.counter += x.size
return ampl * np.exp(-0.5 * (args[0] - pos)**2 / sigma**2)
np.random.seed(0)
leastsq = LeastSq()
levmar = LevMar()
mygauss = MyGauss()
myconst = MyConst1D()
mymodel = mygauss + myconst
x = np.linspace(-5., 5., 5)
err = 0.25
y = mymodel(x) + np.random.normal(mygauss.pos.val, err, x.shape)
mygauss.counter = 0
myconst.counter = 0
data = Data1D('one', x, y)
fit = Fit(data, mymodel, leastsq, levmar)
result = fit.fit()
assert result.numpoints == x.size
assert result.statval < 1.0
assert mygauss.counter == myconst.counter
assert (result.nfev + 4) * x.size == mygauss.counter
mygauss.counter = 0
myconst.counter = 0
x_regrid = np.linspace(-5., 5., 25)
mymodel_regrid = mymodel.regrid(x_regrid)
fit = Fit(data, mymodel_regrid, leastsq, levmar)
result = fit.fit()
assert result.numpoints == x.size
assert result.statval < 1.0
assert mygauss.counter == myconst.counter
assert (result.nfev + 4) * x_regrid.size == mygauss.counter
def test_regrid_binaryop_2d():
y0, x0 = np.mgrid[20:29, 10:20]
y0 = y0.flatten()
x0 = x0.flatten()
gmdl = Gauss2D()
gmdl.fwhm = 14
gmdl.xpos = 15
gmdl.ypos = 24
gmdl.ampl = 10
cmdl = Const2D()
cmdl.c0 = 4
xr1 = np.arange(10, 20, 1)
yr1 = np.arange(20, 29, 1)
rmdlg = gmdl.regrid(xr1, yr1)
rmdlc = cmdl.regrid(xr1, yr1)
shape = y0.shape
truthg = gmdl(x0, y0).reshape(shape)
truthc = cmdl(x0, y0).reshape(shape)
truth = truthg + truthc
ans1 = rmdlg(x0, y0).reshape(shape)
ans2 = rmdlc(x0, y0).reshape(shape)
assert (ans1 == truthg).all()
assert (ans2 == truthc).all()
rmdl = (gmdl + cmdl).regrid(xr1, yr1)
ans3 = rmdl(x0, y0).reshape(shape)
assert (ans3 == truth).all()
def test_regrid_call_behavior():
class Wrappable1D(model.RegriddableModel1D):
def __init__(self, cls, name):
self.ncalled = [] # record the number of elements
self.baseclass = cls
self.baseclass.__init__(self, name)
def calc(self, pars, xlo, *args, **kwargs):
xlo = np.asarray(xlo)
self.ncalled.append((xlo[0], xlo[-1], xlo.size))
return self.baseclass.calc(self, pars, xlo, *args, **kwargs)
m1 = Wrappable1D(basic.Const1D, 'm1')
m2 = Wrappable1D(basic.Gauss1D, 'm2')
m2.pos = 5
xregrid = np.arange(0, 20, 0.2)
xdata = np.arange(1.5, 12.5, 0.5)
morig = m1 + m2
mwrap = morig.regrid(xregrid)
# evaluate the model, we do not check the return value
_ = mwrap(xdata)
# Check both components were called with the same grid
assert m1.ncalled == m2.ncalled
# Check that m1 was called with the expected grid (ie that
# it is larger than xdata).
got = m1.ncalled
assert len(got) == 1
minval, maxval, nbins = m1.ncalled[0]
assert minval == pytest.approx(0)
assert maxval == pytest.approx(19.8)
assert nbins > xdata.size
assert nbins == 111
class MyModel(RegriddableModel1D):
"""
A model that returns [100, ] * len(x) if 2.5 is in the input array x
"""
def __init__(self, name):
self.has_25 = Parameter(name, "has_25", 0, min=0, max=1)
ArithmeticModel.__init__(self, name, (self.has_25,))
def guess(self, dep, *args, **kwargs):
raise NotImplementedError()
def get_center(self):
raise NotImplementedError()
def set_center(self, *args, **kwargs):
raise NotImplementedError()
def calc(self, p, *args, **kwargs):
x = args[0]
if 2.5 not in x:
if p[0] == 0:
return [100, ] * len(x)
else:
return [100-p[0] * 100, ] * len(x)
if p[0] == 1:
return [100, ] * len(x)
else:
return [p[0]*100, ] * len(x)
class MyModel2D(RegriddableModel2D):
"""
A 2D model that returns [100, ] * len(x) * len(y) if 2.5 is in the input arrays x and y
"""
def __init__(self, name):
self.x_has_25 = Parameter(name, "x_has_25", 0, min=0, max=1)
self.y_has_25 = Parameter(name, "y_has_25", 0, min=0, max=1)
RegriddableModel2D.__init__(self, name, (self.x_has_25, self.y_has_25))
def guess(self, dep, *args, **kwargs):
raise NotImplementedError()
def get_center(self):
raise NotImplementedError()
def set_center(self, *args, **kwargs):
raise NotImplementedError()
def calc(self, p, *args, **kwargs):
x, y, x_has_25, y_has_25 = args[0], args[1], p[0], p[1]
x_eval = np.array(self._eval(x, x_has_25))
y_eval = np.array(self._eval(y, y_has_25))
return (x_eval + y_eval)/2
def _eval(self, array, has_25):
if 2.5 not in array:
if has_25 == 0:
return [100, ] * len(array)
else:
return [100 - has_25 * 100, ] * len(array)
if has_25 == 1:
return [100, ] * len(array)
else:
return [has_25 * 100, ] * len(array)
def assert_fit(ui, model, value):
ui.fit()
try: # 2D, two values
len(value)
assert model.x_has_25.val == approx(value[0])
assert model.y_has_25.val == approx(value[1])
except TypeError: # 1D, one value
assert model.has_25.val == approx(value)
| anetasie/sherpa | sherpa/models/tests/test_regrid.py | Python | gpl-3.0 | 16,594 |
"""WSGI Components
This module implements WSGI Components.
"""
try:
from urllib.parse import unquote
except ImportError:
from urllib import unquote # NOQA
from operator import itemgetter
from traceback import format_tb
from types import GeneratorType
from sys import exc_info as _exc_info
from circuits.tools import tryimport
from circuits.core import handler, BaseComponent
StringIO = tryimport(("cStringIO", "StringIO", "io"), "StringIO")
from .http import HTTP
from .events import request
from .headers import Headers
from .errors import httperror
from circuits.web import wrappers
from .dispatchers import Dispatcher
def create_environ(errors, path, req):
environ = {}
env = environ.__setitem__
env("REQUEST_METHOD", req.method)
env("SERVER_NAME", req.host.split(":", 1)[0])
env("SERVER_PORT", "%i" % (req.server.port or 0))
env("SERVER_PROTOCOL", "HTTP/%d.%d" % req.server.http.protocol)
env("QUERY_STRING", req.qs)
env("SCRIPT_NAME", req.script_name)
env("CONTENT_TYPE", req.headers.get("Content-Type", ""))
env("CONTENT_LENGTH", req.headers.get("Content-Length", ""))
env("REMOTE_ADDR", req.remote.ip)
env("REMOTE_PORT", "%i" % (req.remote.port or 0))
env("wsgi.version", (1, 0))
env("wsgi.input", req.body)
env("wsgi.errors", errors)
env("wsgi.multithread", False)
env("wsgi.multiprocess", False)
env("wsgi.run_once", False)
env("wsgi.url_scheme", req.scheme)
if req.path:
req.script_name = req.path[:len(path)]
req.path = req.path[len(path):]
env("SCRIPT_NAME", req.script_name)
env("PATH_INFO", req.path)
for k, v in list(req.headers.items()):
env("HTTP_%s" % k.upper().replace("-", "_"), v)
return environ
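# Illustrative example: for a request to "/app/page?x=1" dispatched to an
# application mounted at "/app", the environ above ends up with
# SCRIPT_NAME="/app", PATH_INFO="/page" and QUERY_STRING="x=1".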
class Application(BaseComponent):
channel = "web"
headerNames = {
"HTTP_CGI_AUTHORIZATION": "Authorization",
"CONTENT_LENGTH": "Content-Length",
"CONTENT_TYPE": "Content-Type",
"REMOTE_HOST": "Remote-Host",
"REMOTE_ADDR": "Remote-Addr",
}
def init(self):
self._finished = False
HTTP(self).register(self)
Dispatcher().register(self)
def translateHeaders(self, environ):
for cgiName in environ:
# We assume all incoming header keys are uppercase already.
if cgiName in self.headerNames:
yield self.headerNames[cgiName], environ[cgiName]
elif cgiName[:5] == "HTTP_":
# Hackish attempt at recovering original header names.
translatedHeader = cgiName[5:].replace("_", "-")
yield translatedHeader, environ[cgiName]
def getRequestResponse(self, environ):
env = environ.get
headers = Headers(list(self.translateHeaders(environ)))
protocol = tuple(map(int, env("SERVER_PROTOCOL")[5:].split(".")))
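        # e.g. "HTTP/1.1" -> (1, 1)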
req = wrappers.Request(
None,
env("REQUEST_METHOD"),
env("wsgi.url_scheme"),
env("PATH_INFO", ""),
protocol,
env("QUERY_STRING", ""),
headers=headers
)
        req.remote = wrappers.Host(env("REMOTE_ADDR"), env("REMOTE_PORT"))
req.script_name = env("SCRIPT_NAME")
req.wsgi_environ = environ
try:
cl = int(headers.get("Content-Length", "0"))
except:
cl = 0
req.body.write(env("wsgi.input").read(cl))
req.body.seek(0)
res = wrappers.Response(req)
res.gzip = "gzip" in req.headers.get("Accept-Encoding", "")
return req, res
def __call__(self, environ, start_response, exc_info=None):
self.request, self.response = self.getRequestResponse(environ)
self.fire(request(self.request, self.response))
self._finished = False
while self or not self._finished:
self.tick()
self.response.prepare()
body = self.response.body
status = self.response.status
headers = list(self.response.headers.items())
start_response(str(status), headers, exc_info)
return body
@handler("response", channel="web")
def response(self, event, response):
self._finished = True
event.stop()
@property
def host(self):
return ""
@property
def port(self):
return 0
@property
def secure(self):
return False
class _Empty(str):
def __bool__(self):
return True
__nonzero__ = __bool__
empty = _Empty()
del _Empty
class Gateway(BaseComponent):
channel = "web"
def init(self, apps):
self.apps = apps
self.errors = dict((k, StringIO()) for k in self.apps.keys())
@handler("request", priority=0.2)
def _on_request(self, event, req, res):
if not self.apps:
return
parts = req.path.split("/")
candidates = []
for i in range(len(parts)):
k = "/".join(parts[:(i + 1)]) or "/"
if k in self.apps:
candidates.append((k, self.apps[k]))
candidates = sorted(candidates, key=itemgetter(0), reverse=True)
if not candidates:
return
path, app = candidates[0]
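        # longest-prefix match: e.g. with applications mounted at "/" and
        # "/api", a request for "/api/v1/users" is routed to the "/api" app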
buffer = StringIO()
def start_response(status, headers, exc_info=None):
res.status = int(status.split(" ", 1)[0])
for header in headers:
res.headers.add_header(*header)
return buffer.write
errors = self.errors[path]
environ = create_environ(errors, path, req)
try:
body = app(environ, start_response)
if isinstance(body, list):
body = "".join(body)
elif isinstance(body, GeneratorType):
res.body = body
res.stream = True
return res
if not body:
if not buffer.tell():
return empty
else:
buffer.seek(0)
return buffer
else:
return body
except Exception as error:
etype, evalue, etraceback = _exc_info()
error = (etype, evalue, format_tb(etraceback))
return httperror(req, res, 500, error=error)
finally:
event.stop()
| treemo/circuits | circuits/web/wsgi.py | Python | mit | 6,348 |
# -*- coding: UTF-8 -*-
"""Module for defining report element classes.
"""
import numpy as np
import pandas as pd
from .base import *
__all__ = __features__ = ["Code", "Footer", "Header", "Table", "Section",
"Text", "Title"]
HEAD_CSS = "@%(pos)s-left{%(left)s};@%(pos)s-center{%(center)s};@%(pos)s-righ" \
"t{%(right)s};"
class Footer(Element):
""" This class represents the footer of a report. """
pos = "bottom"
def __init__(self, left="", center="", right=""):
self.pos = self.__class__.pos
if self.pos == "bottom" and center == "":
center = "\" counter(page) \"/\" counter(pages) \""
for elm in ["left", "center", "right"]:
val = locals()[elm]
if isinstance(val, string_types):
val = Text(val, size=9)
if isinstance(val, Text):
setattr(self, elm, val.css())
@output
def css(self, text=TEXT):
return HEAD_CSS % self.__dict__
class Header(Footer):
""" This class represents the header of a report. """
pos = "top"
class Table(Element):
""" This class represents a table. """
filename = "table"
def __init__(self, data, col_headers=None, row_headers=None):
array = np.array(data) if not isinstance(data, np.ndarray) else data
kw = {}
if col_headers is not None:
kw['columns'] = col_headers
if row_headers is not None:
kw['index'] = row_headers
self._data = pd.DataFrame(data, **kw)
@output
def csv(self, text=TEXT, sep=',', index=True, float_fmt="%.2g"):
""" Generate a CSV table from the table data. """
return self._data.to_csv(sep=sep, index=index, float_format=float_fmt)
@output
def html(self, text=TEXT):
""" Generate an HTML table from the table data. """
return self._data.to_html()
@output
def json(self, text=TEXT):
""" Generate a JSON object form the table data. """
return self._data.to_json(orient='index')
@output
def md(self, text=TEXT, float_format="%.2g"):
""" Generate Markdown from the table data. """
cols = self._data.columns
hl = pd.DataFrame([["---"] * len(cols)], index=["---"], columns=cols)
df = pd.concat([hl, self._data])
return df.to_csv(sep='|', index=True, float_format=float_format)
@output
def xml(self, text=TEXT):
""" Generate an XML output from the report data. """
def convert(line):
xml = " <item>\n"
for f in line.index:
xml += " <field name=\"%s\">%s</field>\n" % (f, line[f])
xml += " </item>\n"
return xml
return "<items>\n" + '\n'.join(self._data.apply(convert, axis=1)) + \
"</items>"
class Text(Element):
""" Text area report element. """
def __init__(self, content, size="12", style="normal", color="black",
tag="p"):
self.content = content
self.tag = tag
self.style = "font-size:%(size)spx;font-style:%(style)s;color:" \
"%(color)s;" % locals()
@output
def css(self, text=TEXT):
return 'content:"%(content)s";%(style)s' % self.__dict__
@output
def html(self, text=TEXT):
return ('<%(tag)s style="%(style)s">%(content)s</%(tag)s>' % \
self.__dict__).replace("\n", "<br>")
@output
def md(self, text=TEXT):
return self.content
class Code(Text):
""" Code block report element. """
def __init__(self, code, size="10", style="normal", color="grey",
language=None, hl_lines=None):
super(Code, self).__init__(code, size, style, color, "pre")
self.language = language
self.hl_lines = hl_lines
@output
def html(self, text=TEXT):
s = "<pre"
if self.language:
s += ' class="%s hljs"' % self.language
s += ' style="%s">' % self.style
return s + str(self.content).replace("\n", "<br>") + "</pre>"
@output
def md(self, text=TEXT):
s = "```"
if self.language:
s += self.language
if self.hl_lines:
s += " hl_lines=\"%s\"" % self.hl_lines
return s + "\n%s\n```" % self.content
class Title(Text):
""" Title report element. """
def __init__(self, title, style="normal", color="black", tag="h1"):
self.content = title
self.tag = tag
self.style = "font-style:%(style)s;color:%(color)s;" % locals()
@output
def md(self, text=TEXT):
i = self.tag[-1]
if not i.isdigit():
raise ValueError("Title tag should be \"h[1-6]\"")
return "%(prefix)s %(content)s" % {'prefix': "#" * int(i),
'content': self.content}
class Section(Title):
""" Section report element. """
def __init__(self, title, style="normal", color="black", tag="h2"):
super(Section, self).__init__(title, style, color, tag)
| dhondta/tinyscript | tinyscript/report/objects.py | Python | agpl-3.0 | 5,115 |
#!/usr/bin/env python
class DiscUsage(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
"""
Attributes:
swaggerTypes (dict): The key is attribute name and the value is attribute type.
attributeMap (dict): The key is attribute name and the value is json key in definition.
"""
self.swaggerTypes = {
'UsedSize': 'long',
'TotalSize': 'long'
}
        self.attributeMap = {
            'UsedSize': 'UsedSize',
            'TotalSize': 'TotalSize'
        }
self.UsedSize = None # long
self.TotalSize = None # long
| imranwar/AsposeStoragePHP | SDKs/Aspose.Storage_Cloud_SDK_For_Python/asposestoragecloud/models/DiscUsage.py | Python | mit | 713 |
#!/usr/bin/python
#============================ adjust path =====================================
import sys
import os
if __name__ == '__main__':
here = sys.path[0]
sys.path.insert(0, os.path.join(here, '..'))
#============================ imports =========================================
import Tkinter
import dustGuiLib
import dustFrame
from dustStyle import dustStyle
from SmartMeshSDK import ApiException
#============================ defines =========================================
WELL_KNOWN_ADDR_MANAGER = 'ff020000000000000000000000000002'
DEFAULT_DEST_PORT = '61000'
DEFAULT_HOST_ADDR = '20010470006600170000000000000002'
#============================ body ============================================
class dustFrameSensorTx(dustFrame.dustFrame):
ERROR = 'error'
NOERROR = 'noerror'
def __init__(self,parentElem,guiLock,frameName="sensor",row=0,column=0):
# record params
# local variables
self.connector = None
self.payloadCounter = 0
# init parent
dustFrame.dustFrame.__init__(self,parentElem,guiLock,frameName,row,column)
#row 0: slide
self.slide = Tkinter.Scale(self.container,
from_=0,
to=0xffff,
orient=Tkinter.HORIZONTAL)
self._add(self.slide,0,0,columnspan=3)
#row 1: label
temp = dustGuiLib.Label(self.container,
text='destination IPv6 address')
self._add(temp,1,0)
temp.configure(font=dustStyle.FONT_HEADER)
temp = dustGuiLib.Label(self.container,
text='dest. UDP port')
self._add(temp,1,1)
temp.configure(font=dustStyle.FONT_HEADER)
#row 2: send to manager
temp = dustGuiLib.Label(self.container,
text=WELL_KNOWN_ADDR_MANAGER)
self._add(temp,2,0)
self.mgrPortText = dustGuiLib.Text(self.container,
width=6,
height=1)
self.mgrPortText.insert(1.0,DEFAULT_DEST_PORT)
self._add(self.mgrPortText,2,1)
self.mgrButton = dustGuiLib.Button(self.container,
text='send to manager',
state=Tkinter.DISABLED,
command=self._sendMgr)
self._add(self.mgrButton,2,2)
#row 3: send to host
self.hostAddrText = dustGuiLib.Text(self.container,
width=35,
height=1)
self.hostAddrText.insert(1.0,DEFAULT_HOST_ADDR)
self._add(self.hostAddrText,3,0)
self.hostPortText = dustGuiLib.Text(self.container,
width=6,
height=1)
self.hostPortText.insert(1.0,DEFAULT_DEST_PORT)
self._add(self.hostPortText,3,1)
self.hostButton = dustGuiLib.Button(self.container,
text='send to host',
state=Tkinter.DISABLED,
command=self._sendHost)
self._add(self.hostButton,3,2)
#row 4: status
self.statusLabel = dustGuiLib.Label(self.container)
self._add(self.statusLabel,4,0,columnspan=3)
#======================== public ==========================================
def activate(self,connector,socketId):
# store params
self.connector = connector
self.socketId = socketId
# enable send buttons
self.mgrButton.config(state=Tkinter.NORMAL)
self.hostButton.config(state=Tkinter.NORMAL)
def disactivate(self):
# forget about the connector
self.connector = None
# disable send buttons
self.mgrButton.config(state=Tkinter.DISABLED)
self.hostButton.config(state=Tkinter.DISABLED)
#======================== private =========================================
def _sendMgr(self):
destAddr = WELL_KNOWN_ADDR_MANAGER
destPort = int(self.mgrPortText.get(1.0,Tkinter.END).strip())
self._sendInternal(destAddr,destPort)
def _sendHost(self):
destAddr = self.hostAddrText.get(1.0,Tkinter.END).strip()
try:
destPort = int(self.hostPortText.get(1.0,Tkinter.END).strip())
        except ValueError:
            self._printStatus(self.ERROR,"invalid port number")
            return
        self._sendInternal(destAddr,destPort)
#======================== helpers =========================================
def _sendInternal(self,destAddrString,destPort):
# format destAddr
destAddr = []
destAddrString = ''.join( destAddrString.split(" ") )
if len(destAddrString)%2!=0:
self._printStatus(self.ERROR,"destination address is not a even number of characters")
return
try:
for i in range(0, len(destAddrString), 2):
destAddr.append( int(destAddrString[i:i+2],16) )
except ValueError:
self._printStatus(self.ERROR,"destination address not hexadecimal numbers")
return
# prepare sensor data
sensorValRaw = self.slide.get()
sensorVal = [(sensorValRaw>>8)%256,(sensorValRaw>>0)%256]
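        # the 16-bit slider value is split into two bytes, most-significant
        # byte first, to form the payload sent below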
try:
res = self.connector.dn_sendTo(
self.socketId,
destAddr,
destPort,
0,
1,
self.payloadCounter,
sensorVal
)
self.payloadCounter += 1
except ApiException.APIError as err:
self._printStatus(self.ERROR,str(err))
else:
assert(res.RC==0)
            self._printStatus(self.NOERROR,"Sent successfully")
def _printStatus(self,errorLevel,statusText):
self.statusLabel.config(text=statusText)
if errorLevel in [self.NOERROR]:
self.statusLabel.config(bg=dustStyle.COLOR_NOERROR)
elif errorLevel in [self.ERROR]:
self.statusLabel.config(bg=dustStyle.COLOR_ERROR)
else:
raise ValueError("unknown errorLevel {0}".format(errorLevel))
#============================ sample app ======================================
# The following gets called only if you run this module as a standalone app, by
# double-clicking on this source file. This code is NOT executed when importing
# this module into a larger application
#
class exampleApp(object):
def __init__(self):
self.window = dustWindow("dustFrameSensorTx",
self._closeCb)
self.guiLock = threading.Lock()
self.frame = dustFrameSensorTx(
self.window,
self.guiLock,
row=0,column=0)
self.frame.show()
self.window.mainloop()
def _closeCb(self):
print " _closeCb called"
if __name__ == '__main__':
import threading
from dustWindow import dustWindow
exampleApp() | bubbalinear/smartmeshsdk | dustUI/dustFrameSensorTx.py | Python | bsd-3-clause | 7,785 |
import _plotly_utils.basevalidators
class FamilyValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self,
plotly_name="family",
parent_name="scatter3d.legendgrouptitle.font",
**kwargs
):
super(FamilyValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "style"),
no_blank=kwargs.pop("no_blank", True),
strict=kwargs.pop("strict", True),
**kwargs
)
| plotly/plotly.py | packages/python/plotly/plotly/validators/scatter3d/legendgrouptitle/font/_family.py | Python | mit | 554 |
"""Computes active users over the last one year."""
import datetime
import logging
import isoweek
import luigi
from edx.analytics.tasks.common.mapreduce import MapReduceJobTask, MapReduceJobTaskMixin
from edx.analytics.tasks.common.pathutil import EventLogSelectionDownstreamMixin, EventLogSelectionMixin
from edx.analytics.tasks.common.vertica_load import IncrementalVerticaCopyTask, VerticaCopyTaskMixin
from edx.analytics.tasks.util import eventlog
from edx.analytics.tasks.util.hive import BareHiveTableTask, HivePartitionTask, WarehouseMixin
from edx.analytics.tasks.util.overwrite import OverwriteOutputMixin
from edx.analytics.tasks.util.url import get_target_from_url
from edx.analytics.tasks.util.weekly_interval import WeeklyIntervalMixin
log = logging.getLogger(__name__)
class ActiveUsersDownstreamMixin(
WarehouseMixin,
OverwriteOutputMixin,
MapReduceJobTaskMixin,
EventLogSelectionDownstreamMixin):
"""Common parameters needed for the workflow."""
pass
class ActiveUsersTask(ActiveUsersDownstreamMixin, EventLogSelectionMixin, MapReduceJobTask):
"""Task to compute active users."""
def mapper(self, line):
value = self.get_event_and_date_string(line)
if value is None:
return
event, date_string = value
username = eventlog.get_event_username(event)
if not username:
log.error("Encountered event with no username: %s", event)
self.incr_counter('Active Users last year', 'Discard Event Missing username', 1)
return
date = datetime.date(*[int(x) for x in date_string.split('-')])
iso_year, iso_weekofyear, _iso_weekday = date.isocalendar()
week = isoweek.Week(iso_year, iso_weekofyear)
start_date = week.monday().isoformat()
end_date = (week.sunday() + datetime.timedelta(1)).isoformat()
yield (start_date, end_date, username), 1
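        # e.g. an event by "alice" on 2016-03-02 (a Wednesday) maps to the key
        # ("2016-02-29", "2016-03-07", "alice"); the reducer then emits each
        # distinct (week, username) key once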
def reducer(self, key, _values):
yield key
def output(self):
output_url = self.hive_partition_path('active_users_per_week', self.interval.date_b)
return get_target_from_url(output_url)
def run(self):
self.remove_output_on_overwrite()
super(ActiveUsersTask, self).run()
def extra_modules(self):
import isoweek
return [isoweek]
class ActiveUsersTableTask(BareHiveTableTask):
"""Hive table that stores the active users over time."""
@property
def partition_by(self):
return 'dt'
@property
def table(self):
return 'active_users_per_week'
@property
def columns(self):
return [
('start_date', 'STRING'),
('end_date', 'STRING'),
('username', 'STRING'),
]
class ActiveUsersPartitionTask(ActiveUsersDownstreamMixin, HivePartitionTask):
"""Creates hive table partition to hold active users data."""
@property
def hive_table_task(self): # pragma: no cover
return ActiveUsersTableTask(
warehouse_path=self.warehouse_path,
)
@property
def partition_value(self):
""" Use a dynamic partition value based on the interval end date. """
return self.interval.date_b.isoformat()
@property
def data_task(self):
return ActiveUsersTask(
mapreduce_engine=self.mapreduce_engine,
n_reduce_tasks=self.n_reduce_tasks,
source=self.source,
pattern=self.pattern,
warehouse_path=self.warehouse_path,
interval=self.interval,
overwrite=self.overwrite,
)
class LoadInternalReportingActiveUsersToWarehouse(WeeklyIntervalMixin, ActiveUsersDownstreamMixin, IncrementalVerticaCopyTask):
"""Loads the active_users_this_year hive table into Vertica warehouse."""
HIVE_TABLE = 'active_users_per_week'
@property
def record_filter(self):
return "start_date='{start_date}' and end_date='{end_date}'".format(
start_date=self.interval.date_a.isoformat(),
end_date=self.interval.date_b.isoformat()
)
def update_id(self):
return '{task_name}(start_date={start_date},end_date={end_date})'.format(
task_name=self.task_family,
start_date=self.interval.date_a.isoformat(),
end_date=self.interval.date_b.isoformat()
)
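        # Illustrative (assumed) value: for the week 2017-03-13..2017-03-20 this
        # yields something like
        # 'LoadInternalReportingActiveUsersToWarehouse(start_date=2017-03-13,end_date=2017-03-20)'.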
@property
def insert_source_task(self):
return ActiveUsersPartitionTask(
interval=self.interval,
n_reduce_tasks=self.n_reduce_tasks,
warehouse_path=self.warehouse_path,
overwrite=self.overwrite,
).data_task
@property
def table(self):
return 'f_active_users_per_week'
@property
def auto_primary_key(self):
return None
@property
def default_columns(self):
"""List of tuples defining name and definition of automatically-filled columns."""
return None
@property
def columns(self):
return [
('start_date', 'DATE'),
('end_date', 'DATE'),
('username', 'VARCHAR(45) NOT NULL'),
]
class ActiveUsersWorkflow(ActiveUsersDownstreamMixin, VerticaCopyTaskMixin, luigi.WrapperTask):
date = luigi.DateParameter()
overwrite_n_weeks = luigi.IntParameter(default=1)
interval = None
def requires(self):
kwargs = {
'schema': self.schema,
'credentials': self.credentials,
'weeks': 1,
'warehouse_path': self.warehouse_path,
'n_reduce_tasks': self.n_reduce_tasks,
}
yield LoadInternalReportingActiveUsersToWarehouse(
end_date=self.date,
overwrite=self.overwrite,
**kwargs
)
weeks_to_overwrite = self.overwrite_n_weeks
end_date = self.date
while weeks_to_overwrite > 0:
end_date = end_date - datetime.timedelta(weeks=1)
yield LoadInternalReportingActiveUsersToWarehouse(
end_date=end_date,
overwrite=True,
**kwargs
)
weeks_to_overwrite -= 1
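        # Illustrative (assumed) expansion: with date=2017-03-20 and
        # overwrite_n_weeks=2, requires() yields the load task for the current
        # week plus re-loads (overwrite=True) for the weeks ending 2017-03-13
        # and 2017-03-06.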
def complete(self):
# OverwriteOutputMixin changes the complete() method behavior, so we override it.
return all(r.complete() for r in luigi.task.flatten(self.requires()))
| Stanford-Online/edx-analytics-pipeline | edx/analytics/tasks/warehouse/load_internal_reporting_active_users.py | Python | agpl-3.0 | 6,350 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
from collections.abc import Iterable
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.region_backend_services import (
RegionBackendServicesClient,
)
from google.cloud.compute_v1.services.region_backend_services import pagers
from google.cloud.compute_v1.services.region_backend_services import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert RegionBackendServicesClient._get_default_mtls_endpoint(None) is None
assert (
RegionBackendServicesClient._get_default_mtls_endpoint(api_endpoint)
== api_mtls_endpoint
)
assert (
RegionBackendServicesClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
RegionBackendServicesClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
RegionBackendServicesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert (
RegionBackendServicesClient._get_default_mtls_endpoint(non_googleapi)
== non_googleapi
)
@pytest.mark.parametrize(
"client_class,transport_name", [(RegionBackendServicesClient, "rest"),]
)
def test_region_backend_services_client_from_service_account_info(
client_class, transport_name
):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info, transport=transport_name)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == (
"compute.googleapis.com{}".format(":443")
if transport_name in ["grpc", "grpc_asyncio"]
else "https://{}".format("compute.googleapis.com")
)
@pytest.mark.parametrize(
"transport_class,transport_name",
[(transports.RegionBackendServicesRestTransport, "rest"),],
)
def test_region_backend_services_client_service_account_always_use_jwt(
transport_class, transport_name
):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=False)
use_jwt.assert_not_called()
@pytest.mark.parametrize(
"client_class,transport_name", [(RegionBackendServicesClient, "rest"),]
)
def test_region_backend_services_client_from_service_account_file(
client_class, transport_name
):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file(
"dummy/file/path.json", transport=transport_name
)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json(
"dummy/file/path.json", transport=transport_name
)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == (
"compute.googleapis.com{}".format(":443")
if transport_name in ["grpc", "grpc_asyncio"]
else "https://{}".format("compute.googleapis.com")
)
def test_region_backend_services_client_get_transport_class():
transport = RegionBackendServicesClient.get_transport_class()
available_transports = [
transports.RegionBackendServicesRestTransport,
]
assert transport in available_transports
transport = RegionBackendServicesClient.get_transport_class("rest")
assert transport == transports.RegionBackendServicesRestTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(
RegionBackendServicesClient,
transports.RegionBackendServicesRestTransport,
"rest",
),
],
)
@mock.patch.object(
RegionBackendServicesClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(RegionBackendServicesClient),
)
def test_region_backend_services_client_client_options(
client_class, transport_class, transport_name
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(RegionBackendServicesClient, "get_transport_class") as gtc:
transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(RegionBackendServicesClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name, client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class(transport=transport_name)
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class(transport=transport_name)
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
(
RegionBackendServicesClient,
transports.RegionBackendServicesRestTransport,
"rest",
"true",
),
(
RegionBackendServicesClient,
transports.RegionBackendServicesRestTransport,
"rest",
"false",
),
],
)
@mock.patch.object(
RegionBackendServicesClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(RegionBackendServicesClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_region_backend_services_client_mtls_env_auto(
client_class, transport_class, transport_name, use_client_cert_env
):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
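    # In short (illustrative restatement, not part of the generated test): the
    # mTLS endpoint and client cert are only used when
    # GOOGLE_API_USE_CLIENT_CERTIFICATE == "true" and a cert source is
    # available; otherwise the default endpoint is used with no client cert.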
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize("client_class", [RegionBackendServicesClient])
@mock.patch.object(
RegionBackendServicesClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(RegionBackendServicesClient),
)
def test_region_backend_services_client_get_mtls_endpoint_and_cert_source(client_class):
mock_client_cert_source = mock.Mock()
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source == mock_client_cert_source
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
mock_client_cert_source = mock.Mock()
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=mock_client_cert_source,
):
(
api_endpoint,
cert_source,
) = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(
RegionBackendServicesClient,
transports.RegionBackendServicesRestTransport,
"rest",
),
],
)
def test_region_backend_services_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(scopes=["1", "2"],)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,grpc_helpers",
[
(
RegionBackendServicesClient,
transports.RegionBackendServicesRestTransport,
"rest",
None,
),
],
)
def test_region_backend_services_client_client_options_credentials_file(
client_class, transport_class, transport_name, grpc_helpers
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"request_type", [compute.DeleteRegionBackendServiceRequest, dict,]
)
def test_delete_unary_rest(request_type):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.delete_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_delete_unary_rest_required_fields(
request_type=compute.DeleteRegionBackendServiceRequest,
):
transport_class = transports.RegionBackendServicesRestTransport
request_init = {}
request_init["backend_service"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).delete._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["backendService"] = "backend_service_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).delete._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "backendService" in jsonified_request
assert jsonified_request["backendService"] == "backend_service_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "delete",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.delete_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_delete_unary_rest_unset_required_fields():
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials
)
unset_fields = transport.delete._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",)) & set(("backendService", "project", "region",))
)
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_unary_rest_interceptors(null_interceptor):
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
if null_interceptor
else transports.RegionBackendServicesRestInterceptor(),
)
client = RegionBackendServicesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "post_delete"
) as post, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "pre_delete"
) as pre:
pre.assert_not_called()
post.assert_not_called()
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": None,
"query_params": {},
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.Operation.to_json(compute.Operation())
request = compute.DeleteRegionBackendServiceRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
post.return_value = compute.Operation
client.delete_unary(
request, metadata=[("key", "val"), ("cephalopod", "squid"),]
)
pre.assert_called_once()
post.assert_called_once()
def test_delete_unary_rest_bad_request(
transport: str = "rest", request_type=compute.DeleteRegionBackendServiceRequest
):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.delete_unary(request)
def test_delete_unary_rest_flattened():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
backend_service="backend_service_value",
)
mock_args.update(sample_request)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
client.delete_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}"
% client.transport._host,
args[1],
)
def test_delete_unary_rest_flattened_error(transport: str = "rest"):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_unary(
compute.DeleteRegionBackendServiceRequest(),
project="project_value",
region="region_value",
backend_service="backend_service_value",
)
def test_delete_unary_rest_error():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.GetRegionBackendServiceRequest, dict,]
)
def test_get_rest(request_type):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.BackendService(
affinity_cookie_ttl_sec=2432,
creation_timestamp="creation_timestamp_value",
custom_request_headers=["custom_request_headers_value"],
custom_response_headers=["custom_response_headers_value"],
description="description_value",
edge_security_policy="edge_security_policy_value",
enable_c_d_n=True,
fingerprint="fingerprint_value",
health_checks=["health_checks_value"],
id=205,
kind="kind_value",
load_balancing_scheme="load_balancing_scheme_value",
locality_lb_policy="locality_lb_policy_value",
name="name_value",
network="network_value",
port=453,
port_name="port_name_value",
protocol="protocol_value",
region="region_value",
security_policy="security_policy_value",
self_link="self_link_value",
session_affinity="session_affinity_value",
timeout_sec=1185,
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.BackendService.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.get(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.BackendService)
assert response.affinity_cookie_ttl_sec == 2432
assert response.creation_timestamp == "creation_timestamp_value"
assert response.custom_request_headers == ["custom_request_headers_value"]
assert response.custom_response_headers == ["custom_response_headers_value"]
assert response.description == "description_value"
assert response.edge_security_policy == "edge_security_policy_value"
assert response.enable_c_d_n is True
assert response.fingerprint == "fingerprint_value"
assert response.health_checks == ["health_checks_value"]
assert response.id == 205
assert response.kind == "kind_value"
assert response.load_balancing_scheme == "load_balancing_scheme_value"
assert response.locality_lb_policy == "locality_lb_policy_value"
assert response.name == "name_value"
assert response.network == "network_value"
assert response.port == 453
assert response.port_name == "port_name_value"
assert response.protocol == "protocol_value"
assert response.region == "region_value"
assert response.security_policy == "security_policy_value"
assert response.self_link == "self_link_value"
assert response.session_affinity == "session_affinity_value"
assert response.timeout_sec == 1185
def test_get_rest_required_fields(request_type=compute.GetRegionBackendServiceRequest):
transport_class = transports.RegionBackendServicesRestTransport
request_init = {}
request_init["backend_service"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).get._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["backendService"] = "backend_service_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).get._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "backendService" in jsonified_request
assert jsonified_request["backendService"] == "backend_service_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.BackendService()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "get",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.BackendService.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.get(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_get_rest_unset_required_fields():
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials
)
unset_fields = transport.get._get_unset_required_fields({})
assert set(unset_fields) == (
set(()) & set(("backendService", "project", "region",))
)
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_rest_interceptors(null_interceptor):
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
if null_interceptor
else transports.RegionBackendServicesRestInterceptor(),
)
client = RegionBackendServicesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "post_get"
) as post, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "pre_get"
) as pre:
pre.assert_not_called()
post.assert_not_called()
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": None,
"query_params": {},
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.BackendService.to_json(
compute.BackendService()
)
request = compute.GetRegionBackendServiceRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
post.return_value = compute.BackendService
client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
pre.assert_called_once()
post.assert_called_once()
def test_get_rest_bad_request(
transport: str = "rest", request_type=compute.GetRegionBackendServiceRequest
):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.get(request)
def test_get_rest_flattened():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.BackendService()
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
backend_service="backend_service_value",
)
mock_args.update(sample_request)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.BackendService.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
client.get(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}"
% client.transport._host,
args[1],
)
def test_get_rest_flattened_error(transport: str = "rest"):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get(
compute.GetRegionBackendServiceRequest(),
project="project_value",
region="region_value",
backend_service="backend_service_value",
)
def test_get_rest_error():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.GetHealthRegionBackendServiceRequest, dict,]
)
def test_get_health_rest(request_type):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
request_init["resource_group_reference_resource"] = {"group": "group_value"}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.BackendServiceGroupHealth(kind="kind_value",)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.BackendServiceGroupHealth.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.get_health(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.BackendServiceGroupHealth)
assert response.kind == "kind_value"
def test_get_health_rest_required_fields(
request_type=compute.GetHealthRegionBackendServiceRequest,
):
transport_class = transports.RegionBackendServicesRestTransport
request_init = {}
request_init["backend_service"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).get_health._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["backendService"] = "backend_service_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).get_health._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "backendService" in jsonified_request
assert jsonified_request["backendService"] == "backend_service_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.BackendServiceGroupHealth()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.BackendServiceGroupHealth.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.get_health(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_get_health_rest_unset_required_fields():
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials
)
unset_fields = transport.get_health._get_unset_required_fields({})
assert set(unset_fields) == (
set(())
& set(
("backendService", "project", "region", "resourceGroupReferenceResource",)
)
)
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_health_rest_interceptors(null_interceptor):
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
if null_interceptor
else transports.RegionBackendServicesRestInterceptor(),
)
client = RegionBackendServicesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "post_get_health"
) as post, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "pre_get_health"
) as pre:
pre.assert_not_called()
post.assert_not_called()
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": None,
"query_params": {},
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.BackendServiceGroupHealth.to_json(
compute.BackendServiceGroupHealth()
)
request = compute.GetHealthRegionBackendServiceRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
post.return_value = compute.BackendServiceGroupHealth
client.get_health(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
pre.assert_called_once()
post.assert_called_once()
def test_get_health_rest_bad_request(
transport: str = "rest", request_type=compute.GetHealthRegionBackendServiceRequest
):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
request_init["resource_group_reference_resource"] = {"group": "group_value"}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.get_health(request)
def test_get_health_rest_flattened():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.BackendServiceGroupHealth()
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
backend_service="backend_service_value",
resource_group_reference_resource=compute.ResourceGroupReference(
group="group_value"
),
)
mock_args.update(sample_request)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.BackendServiceGroupHealth.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
client.get_health(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}/getHealth"
% client.transport._host,
args[1],
)
def test_get_health_rest_flattened_error(transport: str = "rest"):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_health(
compute.GetHealthRegionBackendServiceRequest(),
project="project_value",
region="region_value",
backend_service="backend_service_value",
resource_group_reference_resource=compute.ResourceGroupReference(
group="group_value"
),
)
def test_get_health_rest_error():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.InsertRegionBackendServiceRequest, dict,]
)
def test_insert_unary_rest(request_type):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "region": "sample2"}
request_init["backend_service_resource"] = {
"affinity_cookie_ttl_sec": 2432,
"backends": [
{
"balancing_mode": "balancing_mode_value",
"capacity_scaler": 0.1575,
"description": "description_value",
"failover": True,
"group": "group_value",
"max_connections": 1608,
"max_connections_per_endpoint": 2990,
"max_connections_per_instance": 2978,
"max_rate": 849,
"max_rate_per_endpoint": 0.22310000000000002,
"max_rate_per_instance": 0.22190000000000001,
"max_utilization": 0.1633,
}
],
"cdn_policy": {
"bypass_cache_on_request_headers": [{"header_name": "header_name_value"}],
"cache_key_policy": {
"include_host": True,
"include_http_headers": [
"include_http_headers_value_1",
"include_http_headers_value_2",
],
"include_named_cookies": [
"include_named_cookies_value_1",
"include_named_cookies_value_2",
],
"include_protocol": True,
"include_query_string": True,
"query_string_blacklist": [
"query_string_blacklist_value_1",
"query_string_blacklist_value_2",
],
"query_string_whitelist": [
"query_string_whitelist_value_1",
"query_string_whitelist_value_2",
],
},
"cache_mode": "cache_mode_value",
"client_ttl": 1074,
"default_ttl": 1176,
"max_ttl": 761,
"negative_caching": True,
"negative_caching_policy": [{"code": 411, "ttl": 340}],
"request_coalescing": True,
"serve_while_stale": 1813,
"signed_url_cache_max_age_sec": 2890,
"signed_url_key_names": [
"signed_url_key_names_value_1",
"signed_url_key_names_value_2",
],
},
"circuit_breakers": {
"max_connections": 1608,
"max_pending_requests": 2149,
"max_requests": 1313,
"max_requests_per_connection": 2902,
"max_retries": 1187,
},
"connection_draining": {"draining_timeout_sec": 2124},
"connection_tracking_policy": {
"connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value",
"idle_timeout_sec": 1694,
"tracking_mode": "tracking_mode_value",
},
"consistent_hash": {
"http_cookie": {
"name": "name_value",
"path": "path_value",
"ttl": {"nanos": 543, "seconds": 751},
},
"http_header_name": "http_header_name_value",
"minimum_ring_size": 1829,
},
"creation_timestamp": "creation_timestamp_value",
"custom_request_headers": [
"custom_request_headers_value_1",
"custom_request_headers_value_2",
],
"custom_response_headers": [
"custom_response_headers_value_1",
"custom_response_headers_value_2",
],
"description": "description_value",
"edge_security_policy": "edge_security_policy_value",
"enable_c_d_n": True,
"failover_policy": {
"disable_connection_drain_on_failover": True,
"drop_traffic_if_unhealthy": True,
"failover_ratio": 0.1494,
},
"fingerprint": "fingerprint_value",
"health_checks": ["health_checks_value_1", "health_checks_value_2"],
"iap": {
"enabled": True,
"oauth2_client_id": "oauth2_client_id_value",
"oauth2_client_secret": "oauth2_client_secret_value",
"oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value",
},
"id": 205,
"kind": "kind_value",
"load_balancing_scheme": "load_balancing_scheme_value",
"locality_lb_policy": "locality_lb_policy_value",
"log_config": {"enable": True, "sample_rate": 0.1165},
"max_stream_duration": {},
"name": "name_value",
"network": "network_value",
"outlier_detection": {
"base_ejection_time": {},
"consecutive_errors": 1956,
"consecutive_gateway_failure": 2880,
"enforcing_consecutive_errors": 3006,
"enforcing_consecutive_gateway_failure": 3930,
"enforcing_success_rate": 2334,
"interval": {},
"max_ejection_percent": 2118,
"success_rate_minimum_hosts": 2799,
"success_rate_request_volume": 2915,
"success_rate_stdev_factor": 2663,
},
"port": 453,
"port_name": "port_name_value",
"protocol": "protocol_value",
"region": "region_value",
"security_policy": "security_policy_value",
"security_settings": {
"client_tls_policy": "client_tls_policy_value",
"subject_alt_names": [
"subject_alt_names_value_1",
"subject_alt_names_value_2",
],
},
"self_link": "self_link_value",
"session_affinity": "session_affinity_value",
"subsetting": {"policy": "policy_value"},
"timeout_sec": 1185,
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.insert_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_insert_unary_rest_required_fields(
request_type=compute.InsertRegionBackendServiceRequest,
):
transport_class = transports.RegionBackendServicesRestTransport
request_init = {}
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).insert._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).insert._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "post",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.insert_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_insert_unary_rest_unset_required_fields():
transport = transports.RegionBackendServicesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.insert._get_unset_required_fields({})
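    # The optional query parameter ("requestId") does not overlap with the
    # required fields, so the intersection below is the empty set and no
    # required fields are expected to be reported as unset.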
assert set(unset_fields) == (
set(("requestId",)) & set(("backendServiceResource", "project", "region",))
)
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_unary_rest_interceptors(null_interceptor):
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
if null_interceptor
else transports.RegionBackendServicesRestInterceptor(),
)
client = RegionBackendServicesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "post_insert"
) as post, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "pre_insert"
) as pre:
pre.assert_not_called()
post.assert_not_called()
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": None,
"query_params": {},
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.Operation.to_json(compute.Operation())
request = compute.InsertRegionBackendServiceRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
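        # Wire up the interceptor mocks: pre passes the request and metadata
        # through unchanged, post returns a stand-in value; only the call
        # counts are asserted below.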
pre.return_value = request, metadata
post.return_value = compute.Operation
client.insert_unary(
request, metadata=[("key", "val"), ("cephalopod", "squid"),]
)
pre.assert_called_once()
post.assert_called_once()
def test_insert_unary_rest_bad_request(
transport: str = "rest", request_type=compute.InsertRegionBackendServiceRequest
):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "region": "sample2"}
request_init["backend_service_resource"] = {
"affinity_cookie_ttl_sec": 2432,
"backends": [
{
"balancing_mode": "balancing_mode_value",
"capacity_scaler": 0.1575,
"description": "description_value",
"failover": True,
"group": "group_value",
"max_connections": 1608,
"max_connections_per_endpoint": 2990,
"max_connections_per_instance": 2978,
"max_rate": 849,
"max_rate_per_endpoint": 0.22310000000000002,
"max_rate_per_instance": 0.22190000000000001,
"max_utilization": 0.1633,
}
],
"cdn_policy": {
"bypass_cache_on_request_headers": [{"header_name": "header_name_value"}],
"cache_key_policy": {
"include_host": True,
"include_http_headers": [
"include_http_headers_value_1",
"include_http_headers_value_2",
],
"include_named_cookies": [
"include_named_cookies_value_1",
"include_named_cookies_value_2",
],
"include_protocol": True,
"include_query_string": True,
"query_string_blacklist": [
"query_string_blacklist_value_1",
"query_string_blacklist_value_2",
],
"query_string_whitelist": [
"query_string_whitelist_value_1",
"query_string_whitelist_value_2",
],
},
"cache_mode": "cache_mode_value",
"client_ttl": 1074,
"default_ttl": 1176,
"max_ttl": 761,
"negative_caching": True,
"negative_caching_policy": [{"code": 411, "ttl": 340}],
"request_coalescing": True,
"serve_while_stale": 1813,
"signed_url_cache_max_age_sec": 2890,
"signed_url_key_names": [
"signed_url_key_names_value_1",
"signed_url_key_names_value_2",
],
},
"circuit_breakers": {
"max_connections": 1608,
"max_pending_requests": 2149,
"max_requests": 1313,
"max_requests_per_connection": 2902,
"max_retries": 1187,
},
"connection_draining": {"draining_timeout_sec": 2124},
"connection_tracking_policy": {
"connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value",
"idle_timeout_sec": 1694,
"tracking_mode": "tracking_mode_value",
},
"consistent_hash": {
"http_cookie": {
"name": "name_value",
"path": "path_value",
"ttl": {"nanos": 543, "seconds": 751},
},
"http_header_name": "http_header_name_value",
"minimum_ring_size": 1829,
},
"creation_timestamp": "creation_timestamp_value",
"custom_request_headers": [
"custom_request_headers_value_1",
"custom_request_headers_value_2",
],
"custom_response_headers": [
"custom_response_headers_value_1",
"custom_response_headers_value_2",
],
"description": "description_value",
"edge_security_policy": "edge_security_policy_value",
"enable_c_d_n": True,
"failover_policy": {
"disable_connection_drain_on_failover": True,
"drop_traffic_if_unhealthy": True,
"failover_ratio": 0.1494,
},
"fingerprint": "fingerprint_value",
"health_checks": ["health_checks_value_1", "health_checks_value_2"],
"iap": {
"enabled": True,
"oauth2_client_id": "oauth2_client_id_value",
"oauth2_client_secret": "oauth2_client_secret_value",
"oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value",
},
"id": 205,
"kind": "kind_value",
"load_balancing_scheme": "load_balancing_scheme_value",
"locality_lb_policy": "locality_lb_policy_value",
"log_config": {"enable": True, "sample_rate": 0.1165},
"max_stream_duration": {},
"name": "name_value",
"network": "network_value",
"outlier_detection": {
"base_ejection_time": {},
"consecutive_errors": 1956,
"consecutive_gateway_failure": 2880,
"enforcing_consecutive_errors": 3006,
"enforcing_consecutive_gateway_failure": 3930,
"enforcing_success_rate": 2334,
"interval": {},
"max_ejection_percent": 2118,
"success_rate_minimum_hosts": 2799,
"success_rate_request_volume": 2915,
"success_rate_stdev_factor": 2663,
},
"port": 453,
"port_name": "port_name_value",
"protocol": "protocol_value",
"region": "region_value",
"security_policy": "security_policy_value",
"security_settings": {
"client_tls_policy": "client_tls_policy_value",
"subject_alt_names": [
"subject_alt_names_value_1",
"subject_alt_names_value_2",
],
},
"self_link": "self_link_value",
"session_affinity": "session_affinity_value",
"subsetting": {"policy": "policy_value"},
"timeout_sec": 1185,
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.insert_unary(request)
def test_insert_unary_rest_flattened():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# get arguments that satisfy an http rule for this method
sample_request = {"project": "sample1", "region": "sample2"}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
backend_service_resource=compute.BackendService(
affinity_cookie_ttl_sec=2432
),
)
mock_args.update(sample_request)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
client.insert_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"%s/compute/v1/projects/{project}/regions/{region}/backendServices"
% client.transport._host,
args[1],
)
def test_insert_unary_rest_flattened_error(transport: str = "rest"):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.insert_unary(
compute.InsertRegionBackendServiceRequest(),
project="project_value",
region="region_value",
backend_service_resource=compute.BackendService(
affinity_cookie_ttl_sec=2432
),
)
def test_insert_unary_rest_error():
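    # Constructing the client over the REST transport is the only step here;
    # no request is issued.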
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.ListRegionBackendServicesRequest, dict,]
)
def test_list_rest(request_type):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "region": "sample2"}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.BackendServiceList(
id="id_value",
kind="kind_value",
next_page_token="next_page_token_value",
self_link="self_link_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.BackendServiceList.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list(request)
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListPager)
assert response.id == "id_value"
assert response.kind == "kind_value"
assert response.next_page_token == "next_page_token_value"
assert response.self_link == "self_link_value"
def test_list_rest_required_fields(
request_type=compute.ListRegionBackendServicesRequest,
):
transport_class = transports.RegionBackendServicesRestTransport
request_init = {}
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(
("filter", "max_results", "order_by", "page_token", "return_partial_success",)
)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.BackendServiceList()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "get",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.BackendServiceList.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_list_rest_unset_required_fields():
transport = transports.RegionBackendServicesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.list._get_unset_required_fields({})
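    # As with insert, the optional list query parameters do not overlap the
    # required fields, so the expected set of unset required fields is empty.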
assert set(unset_fields) == (
set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess",))
& set(("project", "region",))
)
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_rest_interceptors(null_interceptor):
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
if null_interceptor
else transports.RegionBackendServicesRestInterceptor(),
)
client = RegionBackendServicesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "post_list"
) as post, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "pre_list"
) as pre:
pre.assert_not_called()
post.assert_not_called()
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": None,
"query_params": {},
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.BackendServiceList.to_json(
compute.BackendServiceList()
)
request = compute.ListRegionBackendServicesRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
post.return_value = compute.BackendServiceList
client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
pre.assert_called_once()
post.assert_called_once()
def test_list_rest_bad_request(
transport: str = "rest", request_type=compute.ListRegionBackendServicesRequest
):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "region": "sample2"}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.list(request)
def test_list_rest_flattened():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.BackendServiceList()
# get arguments that satisfy an http rule for this method
sample_request = {"project": "sample1", "region": "sample2"}
# get truthy value for each flattened field
mock_args = dict(project="project_value", region="region_value",)
mock_args.update(sample_request)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.BackendServiceList.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
client.list(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"%s/compute/v1/projects/{project}/regions/{region}/backendServices"
% client.transport._host,
args[1],
)
def test_list_rest_flattened_error(transport: str = "rest"):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list(
compute.ListRegionBackendServicesRequest(),
project="project_value",
region="region_value",
)
def test_list_rest_pager(transport: str = "rest"):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# TODO(kbandes): remove this mock unless there's a good reason for it.
# with mock.patch.object(path_template, 'transcode') as transcode:
# Set the response as a series of pages
response = (
compute.BackendServiceList(
items=[
compute.BackendService(),
compute.BackendService(),
compute.BackendService(),
],
next_page_token="abc",
),
compute.BackendServiceList(items=[], next_page_token="def",),
compute.BackendServiceList(
items=[compute.BackendService(),], next_page_token="ghi",
),
compute.BackendServiceList(
items=[compute.BackendService(), compute.BackendService(),],
),
)
# Two responses for two calls
response = response + response
# Wrap the values into proper Response objs
response = tuple(compute.BackendServiceList.to_json(x) for x in response)
return_values = tuple(Response() for i in response)
for return_val, response_val in zip(return_values, response):
return_val._content = response_val.encode("UTF-8")
return_val.status_code = 200
req.side_effect = return_values
sample_request = {"project": "sample1", "region": "sample2"}
pager = client.list(request=sample_request)
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, compute.BackendService) for i in results)
pages = list(client.list(request=sample_request).pages)
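        # The final page in the sequence should report an empty next_page_token.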
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
"request_type", [compute.PatchRegionBackendServiceRequest, dict,]
)
def test_patch_unary_rest(request_type):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
request_init["backend_service_resource"] = {
"affinity_cookie_ttl_sec": 2432,
"backends": [
{
"balancing_mode": "balancing_mode_value",
"capacity_scaler": 0.1575,
"description": "description_value",
"failover": True,
"group": "group_value",
"max_connections": 1608,
"max_connections_per_endpoint": 2990,
"max_connections_per_instance": 2978,
"max_rate": 849,
"max_rate_per_endpoint": 0.22310000000000002,
"max_rate_per_instance": 0.22190000000000001,
"max_utilization": 0.1633,
}
],
"cdn_policy": {
"bypass_cache_on_request_headers": [{"header_name": "header_name_value"}],
"cache_key_policy": {
"include_host": True,
"include_http_headers": [
"include_http_headers_value_1",
"include_http_headers_value_2",
],
"include_named_cookies": [
"include_named_cookies_value_1",
"include_named_cookies_value_2",
],
"include_protocol": True,
"include_query_string": True,
"query_string_blacklist": [
"query_string_blacklist_value_1",
"query_string_blacklist_value_2",
],
"query_string_whitelist": [
"query_string_whitelist_value_1",
"query_string_whitelist_value_2",
],
},
"cache_mode": "cache_mode_value",
"client_ttl": 1074,
"default_ttl": 1176,
"max_ttl": 761,
"negative_caching": True,
"negative_caching_policy": [{"code": 411, "ttl": 340}],
"request_coalescing": True,
"serve_while_stale": 1813,
"signed_url_cache_max_age_sec": 2890,
"signed_url_key_names": [
"signed_url_key_names_value_1",
"signed_url_key_names_value_2",
],
},
"circuit_breakers": {
"max_connections": 1608,
"max_pending_requests": 2149,
"max_requests": 1313,
"max_requests_per_connection": 2902,
"max_retries": 1187,
},
"connection_draining": {"draining_timeout_sec": 2124},
"connection_tracking_policy": {
"connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value",
"idle_timeout_sec": 1694,
"tracking_mode": "tracking_mode_value",
},
"consistent_hash": {
"http_cookie": {
"name": "name_value",
"path": "path_value",
"ttl": {"nanos": 543, "seconds": 751},
},
"http_header_name": "http_header_name_value",
"minimum_ring_size": 1829,
},
"creation_timestamp": "creation_timestamp_value",
"custom_request_headers": [
"custom_request_headers_value_1",
"custom_request_headers_value_2",
],
"custom_response_headers": [
"custom_response_headers_value_1",
"custom_response_headers_value_2",
],
"description": "description_value",
"edge_security_policy": "edge_security_policy_value",
"enable_c_d_n": True,
"failover_policy": {
"disable_connection_drain_on_failover": True,
"drop_traffic_if_unhealthy": True,
"failover_ratio": 0.1494,
},
"fingerprint": "fingerprint_value",
"health_checks": ["health_checks_value_1", "health_checks_value_2"],
"iap": {
"enabled": True,
"oauth2_client_id": "oauth2_client_id_value",
"oauth2_client_secret": "oauth2_client_secret_value",
"oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value",
},
"id": 205,
"kind": "kind_value",
"load_balancing_scheme": "load_balancing_scheme_value",
"locality_lb_policy": "locality_lb_policy_value",
"log_config": {"enable": True, "sample_rate": 0.1165},
"max_stream_duration": {},
"name": "name_value",
"network": "network_value",
"outlier_detection": {
"base_ejection_time": {},
"consecutive_errors": 1956,
"consecutive_gateway_failure": 2880,
"enforcing_consecutive_errors": 3006,
"enforcing_consecutive_gateway_failure": 3930,
"enforcing_success_rate": 2334,
"interval": {},
"max_ejection_percent": 2118,
"success_rate_minimum_hosts": 2799,
"success_rate_request_volume": 2915,
"success_rate_stdev_factor": 2663,
},
"port": 453,
"port_name": "port_name_value",
"protocol": "protocol_value",
"region": "region_value",
"security_policy": "security_policy_value",
"security_settings": {
"client_tls_policy": "client_tls_policy_value",
"subject_alt_names": [
"subject_alt_names_value_1",
"subject_alt_names_value_2",
],
},
"self_link": "self_link_value",
"session_affinity": "session_affinity_value",
"subsetting": {"policy": "policy_value"},
"timeout_sec": 1185,
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.patch_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_patch_unary_rest_required_fields(
request_type=compute.PatchRegionBackendServiceRequest,
):
transport_class = transports.RegionBackendServicesRestTransport
request_init = {}
request_init["backend_service"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).patch._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["backendService"] = "backend_service_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).patch._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "backendService" in jsonified_request
assert jsonified_request["backendService"] == "backend_service_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "patch",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.patch_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_patch_unary_rest_unset_required_fields():
transport = transports.RegionBackendServicesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.patch._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",))
& set(("backendService", "backendServiceResource", "project", "region",))
)
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_patch_unary_rest_interceptors(null_interceptor):
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
if null_interceptor
else transports.RegionBackendServicesRestInterceptor(),
)
client = RegionBackendServicesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "post_patch"
) as post, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "pre_patch"
) as pre:
pre.assert_not_called()
post.assert_not_called()
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": None,
"query_params": {},
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.Operation.to_json(compute.Operation())
request = compute.PatchRegionBackendServiceRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
post.return_value = compute.Operation
client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
pre.assert_called_once()
post.assert_called_once()
def test_patch_unary_rest_bad_request(
transport: str = "rest", request_type=compute.PatchRegionBackendServiceRequest
):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
request_init["backend_service_resource"] = {
"affinity_cookie_ttl_sec": 2432,
"backends": [
{
"balancing_mode": "balancing_mode_value",
"capacity_scaler": 0.1575,
"description": "description_value",
"failover": True,
"group": "group_value",
"max_connections": 1608,
"max_connections_per_endpoint": 2990,
"max_connections_per_instance": 2978,
"max_rate": 849,
"max_rate_per_endpoint": 0.22310000000000002,
"max_rate_per_instance": 0.22190000000000001,
"max_utilization": 0.1633,
}
],
"cdn_policy": {
"bypass_cache_on_request_headers": [{"header_name": "header_name_value"}],
"cache_key_policy": {
"include_host": True,
"include_http_headers": [
"include_http_headers_value_1",
"include_http_headers_value_2",
],
"include_named_cookies": [
"include_named_cookies_value_1",
"include_named_cookies_value_2",
],
"include_protocol": True,
"include_query_string": True,
"query_string_blacklist": [
"query_string_blacklist_value_1",
"query_string_blacklist_value_2",
],
"query_string_whitelist": [
"query_string_whitelist_value_1",
"query_string_whitelist_value_2",
],
},
"cache_mode": "cache_mode_value",
"client_ttl": 1074,
"default_ttl": 1176,
"max_ttl": 761,
"negative_caching": True,
"negative_caching_policy": [{"code": 411, "ttl": 340}],
"request_coalescing": True,
"serve_while_stale": 1813,
"signed_url_cache_max_age_sec": 2890,
"signed_url_key_names": [
"signed_url_key_names_value_1",
"signed_url_key_names_value_2",
],
},
"circuit_breakers": {
"max_connections": 1608,
"max_pending_requests": 2149,
"max_requests": 1313,
"max_requests_per_connection": 2902,
"max_retries": 1187,
},
"connection_draining": {"draining_timeout_sec": 2124},
"connection_tracking_policy": {
"connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value",
"idle_timeout_sec": 1694,
"tracking_mode": "tracking_mode_value",
},
"consistent_hash": {
"http_cookie": {
"name": "name_value",
"path": "path_value",
"ttl": {"nanos": 543, "seconds": 751},
},
"http_header_name": "http_header_name_value",
"minimum_ring_size": 1829,
},
"creation_timestamp": "creation_timestamp_value",
"custom_request_headers": [
"custom_request_headers_value_1",
"custom_request_headers_value_2",
],
"custom_response_headers": [
"custom_response_headers_value_1",
"custom_response_headers_value_2",
],
"description": "description_value",
"edge_security_policy": "edge_security_policy_value",
"enable_c_d_n": True,
"failover_policy": {
"disable_connection_drain_on_failover": True,
"drop_traffic_if_unhealthy": True,
"failover_ratio": 0.1494,
},
"fingerprint": "fingerprint_value",
"health_checks": ["health_checks_value_1", "health_checks_value_2"],
"iap": {
"enabled": True,
"oauth2_client_id": "oauth2_client_id_value",
"oauth2_client_secret": "oauth2_client_secret_value",
"oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value",
},
"id": 205,
"kind": "kind_value",
"load_balancing_scheme": "load_balancing_scheme_value",
"locality_lb_policy": "locality_lb_policy_value",
"log_config": {"enable": True, "sample_rate": 0.1165},
"max_stream_duration": {},
"name": "name_value",
"network": "network_value",
"outlier_detection": {
"base_ejection_time": {},
"consecutive_errors": 1956,
"consecutive_gateway_failure": 2880,
"enforcing_consecutive_errors": 3006,
"enforcing_consecutive_gateway_failure": 3930,
"enforcing_success_rate": 2334,
"interval": {},
"max_ejection_percent": 2118,
"success_rate_minimum_hosts": 2799,
"success_rate_request_volume": 2915,
"success_rate_stdev_factor": 2663,
},
"port": 453,
"port_name": "port_name_value",
"protocol": "protocol_value",
"region": "region_value",
"security_policy": "security_policy_value",
"security_settings": {
"client_tls_policy": "client_tls_policy_value",
"subject_alt_names": [
"subject_alt_names_value_1",
"subject_alt_names_value_2",
],
},
"self_link": "self_link_value",
"session_affinity": "session_affinity_value",
"subsetting": {"policy": "policy_value"},
"timeout_sec": 1185,
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.patch_unary(request)
def test_patch_unary_rest_flattened():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
backend_service="backend_service_value",
backend_service_resource=compute.BackendService(
affinity_cookie_ttl_sec=2432
),
)
mock_args.update(sample_request)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
client.patch_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}"
% client.transport._host,
args[1],
)
def test_patch_unary_rest_flattened_error(transport: str = "rest"):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.patch_unary(
compute.PatchRegionBackendServiceRequest(),
project="project_value",
region="region_value",
backend_service="backend_service_value",
backend_service_resource=compute.BackendService(
affinity_cookie_ttl_sec=2432
),
)
def test_patch_unary_rest_error():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize(
"request_type", [compute.UpdateRegionBackendServiceRequest, dict,]
)
def test_update_unary_rest(request_type):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
request_init["backend_service_resource"] = {
"affinity_cookie_ttl_sec": 2432,
"backends": [
{
"balancing_mode": "balancing_mode_value",
"capacity_scaler": 0.1575,
"description": "description_value",
"failover": True,
"group": "group_value",
"max_connections": 1608,
"max_connections_per_endpoint": 2990,
"max_connections_per_instance": 2978,
"max_rate": 849,
"max_rate_per_endpoint": 0.22310000000000002,
"max_rate_per_instance": 0.22190000000000001,
"max_utilization": 0.1633,
}
],
"cdn_policy": {
"bypass_cache_on_request_headers": [{"header_name": "header_name_value"}],
"cache_key_policy": {
"include_host": True,
"include_http_headers": [
"include_http_headers_value_1",
"include_http_headers_value_2",
],
"include_named_cookies": [
"include_named_cookies_value_1",
"include_named_cookies_value_2",
],
"include_protocol": True,
"include_query_string": True,
"query_string_blacklist": [
"query_string_blacklist_value_1",
"query_string_blacklist_value_2",
],
"query_string_whitelist": [
"query_string_whitelist_value_1",
"query_string_whitelist_value_2",
],
},
"cache_mode": "cache_mode_value",
"client_ttl": 1074,
"default_ttl": 1176,
"max_ttl": 761,
"negative_caching": True,
"negative_caching_policy": [{"code": 411, "ttl": 340}],
"request_coalescing": True,
"serve_while_stale": 1813,
"signed_url_cache_max_age_sec": 2890,
"signed_url_key_names": [
"signed_url_key_names_value_1",
"signed_url_key_names_value_2",
],
},
"circuit_breakers": {
"max_connections": 1608,
"max_pending_requests": 2149,
"max_requests": 1313,
"max_requests_per_connection": 2902,
"max_retries": 1187,
},
"connection_draining": {"draining_timeout_sec": 2124},
"connection_tracking_policy": {
"connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value",
"idle_timeout_sec": 1694,
"tracking_mode": "tracking_mode_value",
},
"consistent_hash": {
"http_cookie": {
"name": "name_value",
"path": "path_value",
"ttl": {"nanos": 543, "seconds": 751},
},
"http_header_name": "http_header_name_value",
"minimum_ring_size": 1829,
},
"creation_timestamp": "creation_timestamp_value",
"custom_request_headers": [
"custom_request_headers_value_1",
"custom_request_headers_value_2",
],
"custom_response_headers": [
"custom_response_headers_value_1",
"custom_response_headers_value_2",
],
"description": "description_value",
"edge_security_policy": "edge_security_policy_value",
"enable_c_d_n": True,
"failover_policy": {
"disable_connection_drain_on_failover": True,
"drop_traffic_if_unhealthy": True,
"failover_ratio": 0.1494,
},
"fingerprint": "fingerprint_value",
"health_checks": ["health_checks_value_1", "health_checks_value_2"],
"iap": {
"enabled": True,
"oauth2_client_id": "oauth2_client_id_value",
"oauth2_client_secret": "oauth2_client_secret_value",
"oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value",
},
"id": 205,
"kind": "kind_value",
"load_balancing_scheme": "load_balancing_scheme_value",
"locality_lb_policy": "locality_lb_policy_value",
"log_config": {"enable": True, "sample_rate": 0.1165},
"max_stream_duration": {},
"name": "name_value",
"network": "network_value",
"outlier_detection": {
"base_ejection_time": {},
"consecutive_errors": 1956,
"consecutive_gateway_failure": 2880,
"enforcing_consecutive_errors": 3006,
"enforcing_consecutive_gateway_failure": 3930,
"enforcing_success_rate": 2334,
"interval": {},
"max_ejection_percent": 2118,
"success_rate_minimum_hosts": 2799,
"success_rate_request_volume": 2915,
"success_rate_stdev_factor": 2663,
},
"port": 453,
"port_name": "port_name_value",
"protocol": "protocol_value",
"region": "region_value",
"security_policy": "security_policy_value",
"security_settings": {
"client_tls_policy": "client_tls_policy_value",
"subject_alt_names": [
"subject_alt_names_value_1",
"subject_alt_names_value_2",
],
},
"self_link": "self_link_value",
"session_affinity": "session_affinity_value",
"subsetting": {"policy": "policy_value"},
"timeout_sec": 1185,
}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
http_error_message="http_error_message_value",
http_error_status_code=2374,
id=205,
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_group_id="operation_group_id_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id=947,
target_link="target_link_value",
user="user_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.update_unary(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == 205
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_group_id == "operation_group_id_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == 947
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.zone == "zone_value"
def test_update_unary_rest_required_fields(
request_type=compute.UpdateRegionBackendServiceRequest,
):
transport_class = transports.RegionBackendServicesRestTransport
request_init = {}
request_init["backend_service"] = ""
request_init["project"] = ""
request_init["region"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).update._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["backendService"] = "backend_service_value"
jsonified_request["project"] = "project_value"
jsonified_request["region"] = "region_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).update._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(("request_id",))
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "backendService" in jsonified_request
assert jsonified_request["backendService"] == "backend_service_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "region" in jsonified_request
assert jsonified_request["region"] == "region_value"
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "put",
"query_params": request_init,
}
transcode_result["body"] = {}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.update_unary(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_update_unary_rest_unset_required_fields():
transport = transports.RegionBackendServicesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.update._get_unset_required_fields({})
assert set(unset_fields) == (
set(("requestId",))
& set(("backendService", "backendServiceResource", "project", "region",))
)
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_update_unary_rest_interceptors(null_interceptor):
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None
if null_interceptor
else transports.RegionBackendServicesRestInterceptor(),
)
client = RegionBackendServicesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "post_update"
) as post, mock.patch.object(
transports.RegionBackendServicesRestInterceptor, "pre_update"
) as pre:
pre.assert_not_called()
post.assert_not_called()
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": None,
"query_params": {},
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.Operation.to_json(compute.Operation())
request = compute.UpdateRegionBackendServiceRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
post.return_value = compute.Operation
client.update_unary(
request, metadata=[("key", "val"), ("cephalopod", "squid"),]
)
pre.assert_called_once()
post.assert_called_once()
def test_update_unary_rest_bad_request(
transport: str = "rest", request_type=compute.UpdateRegionBackendServiceRequest
):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
request_init["backend_service_resource"] = {
"affinity_cookie_ttl_sec": 2432,
"backends": [
{
"balancing_mode": "balancing_mode_value",
"capacity_scaler": 0.1575,
"description": "description_value",
"failover": True,
"group": "group_value",
"max_connections": 1608,
"max_connections_per_endpoint": 2990,
"max_connections_per_instance": 2978,
"max_rate": 849,
"max_rate_per_endpoint": 0.22310000000000002,
"max_rate_per_instance": 0.22190000000000001,
"max_utilization": 0.1633,
}
],
"cdn_policy": {
"bypass_cache_on_request_headers": [{"header_name": "header_name_value"}],
"cache_key_policy": {
"include_host": True,
"include_http_headers": [
"include_http_headers_value_1",
"include_http_headers_value_2",
],
"include_named_cookies": [
"include_named_cookies_value_1",
"include_named_cookies_value_2",
],
"include_protocol": True,
"include_query_string": True,
"query_string_blacklist": [
"query_string_blacklist_value_1",
"query_string_blacklist_value_2",
],
"query_string_whitelist": [
"query_string_whitelist_value_1",
"query_string_whitelist_value_2",
],
},
"cache_mode": "cache_mode_value",
"client_ttl": 1074,
"default_ttl": 1176,
"max_ttl": 761,
"negative_caching": True,
"negative_caching_policy": [{"code": 411, "ttl": 340}],
"request_coalescing": True,
"serve_while_stale": 1813,
"signed_url_cache_max_age_sec": 2890,
"signed_url_key_names": [
"signed_url_key_names_value_1",
"signed_url_key_names_value_2",
],
},
"circuit_breakers": {
"max_connections": 1608,
"max_pending_requests": 2149,
"max_requests": 1313,
"max_requests_per_connection": 2902,
"max_retries": 1187,
},
"connection_draining": {"draining_timeout_sec": 2124},
"connection_tracking_policy": {
"connection_persistence_on_unhealthy_backends": "connection_persistence_on_unhealthy_backends_value",
"idle_timeout_sec": 1694,
"tracking_mode": "tracking_mode_value",
},
"consistent_hash": {
"http_cookie": {
"name": "name_value",
"path": "path_value",
"ttl": {"nanos": 543, "seconds": 751},
},
"http_header_name": "http_header_name_value",
"minimum_ring_size": 1829,
},
"creation_timestamp": "creation_timestamp_value",
"custom_request_headers": [
"custom_request_headers_value_1",
"custom_request_headers_value_2",
],
"custom_response_headers": [
"custom_response_headers_value_1",
"custom_response_headers_value_2",
],
"description": "description_value",
"edge_security_policy": "edge_security_policy_value",
"enable_c_d_n": True,
"failover_policy": {
"disable_connection_drain_on_failover": True,
"drop_traffic_if_unhealthy": True,
"failover_ratio": 0.1494,
},
"fingerprint": "fingerprint_value",
"health_checks": ["health_checks_value_1", "health_checks_value_2"],
"iap": {
"enabled": True,
"oauth2_client_id": "oauth2_client_id_value",
"oauth2_client_secret": "oauth2_client_secret_value",
"oauth2_client_secret_sha256": "oauth2_client_secret_sha256_value",
},
"id": 205,
"kind": "kind_value",
"load_balancing_scheme": "load_balancing_scheme_value",
"locality_lb_policy": "locality_lb_policy_value",
"log_config": {"enable": True, "sample_rate": 0.1165},
"max_stream_duration": {},
"name": "name_value",
"network": "network_value",
"outlier_detection": {
"base_ejection_time": {},
"consecutive_errors": 1956,
"consecutive_gateway_failure": 2880,
"enforcing_consecutive_errors": 3006,
"enforcing_consecutive_gateway_failure": 3930,
"enforcing_success_rate": 2334,
"interval": {},
"max_ejection_percent": 2118,
"success_rate_minimum_hosts": 2799,
"success_rate_request_volume": 2915,
"success_rate_stdev_factor": 2663,
},
"port": 453,
"port_name": "port_name_value",
"protocol": "protocol_value",
"region": "region_value",
"security_policy": "security_policy_value",
"security_settings": {
"client_tls_policy": "client_tls_policy_value",
"subject_alt_names": [
"subject_alt_names_value_1",
"subject_alt_names_value_2",
],
},
"self_link": "self_link_value",
"session_affinity": "session_affinity_value",
"subsetting": {"policy": "policy_value"},
"timeout_sec": 1185,
}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.update_unary(request)
def test_update_unary_rest_flattened():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"region": "sample2",
"backend_service": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value",
region="region_value",
backend_service="backend_service_value",
backend_service_resource=compute.BackendService(
affinity_cookie_ttl_sec=2432
),
)
mock_args.update(sample_request)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
client.update_unary(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}"
% client.transport._host,
args[1],
)
def test_update_unary_rest_flattened_error(transport: str = "rest"):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_unary(
compute.UpdateRegionBackendServiceRequest(),
project="project_value",
region="region_value",
backend_service="backend_service_value",
backend_service_resource=compute.BackendService(
affinity_cookie_ttl_sec=2432
),
)
def test_update_unary_rest_error():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = RegionBackendServicesClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide an api_key and a transport instance.
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
options = client_options.ClientOptions()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = RegionBackendServicesClient(
client_options=options, transport=transport,
)
# It is an error to provide an api_key and a credential.
options = mock.Mock()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = RegionBackendServicesClient(
client_options=options, credentials=ga_credentials.AnonymousCredentials()
)
# It is an error to provide scopes and a transport instance.
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = RegionBackendServicesClient(
client_options={"scopes": ["1", "2"]}, transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.RegionBackendServicesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = RegionBackendServicesClient(transport=transport)
assert client.transport is transport
@pytest.mark.parametrize(
"transport_class", [transports.RegionBackendServicesRestTransport,]
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_region_backend_services_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.RegionBackendServicesTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_region_backend_services_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.compute_v1.services.region_backend_services.transports.RegionBackendServicesTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.RegionBackendServicesTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"delete",
"get",
"get_health",
"insert",
"list",
"patch",
"update",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
with pytest.raises(NotImplementedError):
transport.close()
def test_region_backend_services_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.compute_v1.services.region_backend_services.transports.RegionBackendServicesTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.RegionBackendServicesTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
def test_region_backend_services_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.compute_v1.services.region_backend_services.transports.RegionBackendServicesTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.RegionBackendServicesTransport()
adc.assert_called_once()
def test_region_backend_services_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
RegionBackendServicesClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id=None,
)
def test_region_backend_services_http_transport_client_cert_source_for_mtls():
cred = ga_credentials.AnonymousCredentials()
with mock.patch(
"google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
) as mock_configure_mtls_channel:
transports.RegionBackendServicesRestTransport(
credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
)
mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
@pytest.mark.parametrize("transport_name", ["rest",])
def test_region_backend_services_host_no_port(transport_name):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com"
),
transport=transport_name,
)
assert client.transport._host == (
"compute.googleapis.com:443"
if transport_name in ["grpc", "grpc_asyncio"]
else "https://compute.googleapis.com"
)
@pytest.mark.parametrize("transport_name", ["rest",])
def test_region_backend_services_host_with_port(transport_name):
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com:8000"
),
transport=transport_name,
)
assert client.transport._host == (
"compute.googleapis.com:8000"
if transport_name in ["grpc", "grpc_asyncio"]
else "https://compute.googleapis.com:8000"
)
def test_common_billing_account_path():
billing_account = "squid"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
actual = RegionBackendServicesClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "clam",
}
path = RegionBackendServicesClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = RegionBackendServicesClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "whelk"
expected = "folders/{folder}".format(folder=folder,)
actual = RegionBackendServicesClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "octopus",
}
path = RegionBackendServicesClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = RegionBackendServicesClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "oyster"
expected = "organizations/{organization}".format(organization=organization,)
actual = RegionBackendServicesClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nudibranch",
}
path = RegionBackendServicesClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = RegionBackendServicesClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "cuttlefish"
expected = "projects/{project}".format(project=project,)
actual = RegionBackendServicesClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "mussel",
}
path = RegionBackendServicesClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = RegionBackendServicesClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
actual = RegionBackendServicesClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "scallop",
"location": "abalone",
}
path = RegionBackendServicesClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = RegionBackendServicesClient.parse_common_location_path(path)
assert expected == actual
def test_client_with_default_client_info():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.RegionBackendServicesTransport, "_prep_wrapped_messages"
) as prep:
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.RegionBackendServicesTransport, "_prep_wrapped_messages"
) as prep:
transport_class = RegionBackendServicesClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
def test_transport_close():
transports = {
"rest": "_session",
}
for transport, close_name in transports.items():
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
with mock.patch.object(
type(getattr(client.transport, close_name)), "close"
) as close:
with client:
close.assert_not_called()
close.assert_called_once()
def test_client_ctx():
transports = [
"rest",
]
for transport in transports:
client = RegionBackendServicesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
# Test client calls underlying transport.
with mock.patch.object(type(client.transport), "close") as close:
close.assert_not_called()
with client:
pass
close.assert_called()
@pytest.mark.parametrize(
"client_class,transport_class",
[(RegionBackendServicesClient, transports.RegionBackendServicesRestTransport),],
)
def test_api_key_credentials(client_class, transport_class):
with mock.patch.object(
google.auth._default, "get_api_key_credentials", create=True
) as get_api_key_credentials:
mock_cred = mock.Mock()
get_api_key_credentials.return_value = mock_cred
options = client_options.ClientOptions()
options.api_key = "api_key"
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=mock_cred,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
| googleapis/python-compute | tests/unit/gapic/compute_v1/test_region_backend_services.py | Python | apache-2.0 | 149,814 |
# Copyright 2010 OpenStack Foundation
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import datetime
from http import client as http_client
import os
import shutil
from urllib import parse as urlparse
from oslo_log import log
from oslo_utils import strutils
from oslo_utils import uuidutils
import requests
from ironic.common import exception
from ironic.common.glance_service.image_service import GlanceImageService
from ironic.common.i18n import _
from ironic.common import utils
from ironic.conf import CONF
IMAGE_CHUNK_SIZE = 1024 * 1024  # 1 MiB
# NOTE(kaifeng) Image will be truncated to 2GiB by sendfile,
# so we use a large chunk size here for better performance
# while keeping the chunk size below that limit.
SENDFILE_CHUNK_SIZE = 1024 * 1024 * 1024  # 1 GiB
LOG = log.getLogger(__name__)
class BaseImageService(object, metaclass=abc.ABCMeta):
"""Provides retrieval of disk images."""
@abc.abstractmethod
def validate_href(self, image_href):
"""Validate image reference.
:param image_href: Image reference.
:raises: exception.ImageRefValidationFailed.
:returns: Information needed to further operate with an image.
"""
@abc.abstractmethod
def download(self, image_href, image_file):
"""Downloads image to specified location.
:param image_href: Image reference.
:param image_file: File object to write data to.
:raises: exception.ImageRefValidationFailed.
:raises: exception.ImageDownloadFailed.
"""
@abc.abstractmethod
def show(self, image_href):
"""Get dictionary of image properties.
:param image_href: Image reference.
:raises: exception.ImageRefValidationFailed.
:returns: dictionary of image properties. It has three of them: 'size',
'updated_at' and 'properties'. 'updated_at' attribute is a naive
UTC datetime object.
"""
class HttpImageService(BaseImageService):
"""Provides retrieval of disk images using HTTP."""
def validate_href(self, image_href, secret=False):
"""Validate HTTP image reference.
:param image_href: Image reference.
:param secret: Specify if image_href being validated should not be
shown in exception message.
:raises: exception.ImageRefValidationFailed if HEAD request failed or
returned response code not equal to 200.
:returns: Response to HEAD request.
"""
output_url = 'secreturl' if secret else image_href
try:
verify = strutils.bool_from_string(CONF.webserver_verify_ca,
strict=True)
except ValueError:
verify = CONF.webserver_verify_ca
try:
response = requests.head(image_href, verify=verify,
timeout=CONF.webserver_connection_timeout)
if response.status_code != http_client.OK:
raise exception.ImageRefValidationFailed(
image_href=output_url,
reason=_("Got HTTP code %s instead of 200 in response "
"to HEAD request.") % response.status_code)
except (OSError, requests.ConnectionError,
requests.RequestException) as e:
raise exception.ImageRefValidationFailed(image_href=output_url,
reason=str(e))
return response
def download(self, image_href, image_file):
"""Downloads image to specified location.
:param image_href: Image reference.
:param image_file: File object to write data to.
:raises: exception.ImageRefValidationFailed if GET request returned
response code not equal to 200.
:raises: exception.ImageDownloadFailed if:
* IOError happened during file write;
* GET request failed.
"""
try:
verify = strutils.bool_from_string(CONF.webserver_verify_ca,
strict=True)
except ValueError:
verify = CONF.webserver_verify_ca
try:
response = requests.get(image_href, stream=True, verify=verify,
timeout=CONF.webserver_connection_timeout)
if response.status_code != http_client.OK:
raise exception.ImageRefValidationFailed(
image_href=image_href,
reason=_("Got HTTP code %s instead of 200 in response "
"to GET request.") % response.status_code)
with response.raw as input_img:
shutil.copyfileobj(input_img, image_file, IMAGE_CHUNK_SIZE)
except (OSError, requests.ConnectionError, requests.RequestException,
IOError) as e:
raise exception.ImageDownloadFailed(image_href=image_href,
reason=str(e))
def show(self, image_href):
"""Get dictionary of image properties.
:param image_href: Image reference.
:raises: exception.ImageRefValidationFailed if:
* HEAD request failed;
* HEAD request returned response code not equal to 200;
* Content-Length header not found in response to HEAD request.
:returns: dictionary of image properties. It has three of them: 'size',
'updated_at' and 'properties'. 'updated_at' attribute is a naive
UTC datetime object.
"""
response = self.validate_href(image_href)
image_size = response.headers.get('Content-Length')
if image_size is None:
raise exception.ImageRefValidationFailed(
image_href=image_href,
reason=_("Cannot determine image size as there is no "
"Content-Length header specified in response "
"to HEAD request."))
# Parse last-modified header to return naive datetime object
str_date = response.headers.get('Last-Modified')
date = None
if str_date:
http_date_format_strings = [
'%a, %d %b %Y %H:%M:%S GMT', # RFC 822
'%A, %d-%b-%y %H:%M:%S GMT', # RFC 850
'%a %b %d %H:%M:%S %Y' # ANSI C
]
for fmt in http_date_format_strings:
try:
date = datetime.datetime.strptime(str_date, fmt)
break
except ValueError:
continue
no_cache = 'no-store' in response.headers.get('Cache-Control', '')
return {
'size': int(image_size),
'updated_at': date,
'properties': {},
'no_cache': no_cache,
}
class FileImageService(BaseImageService):
"""Provides retrieval of disk images available locally on the conductor."""
def validate_href(self, image_href):
"""Validate local image reference.
:param image_href: Image reference.
:raises: exception.ImageRefValidationFailed if source image file
doesn't exist.
:returns: Path to image file if it exists.
"""
image_path = urlparse.urlparse(image_href).path
if not os.path.isfile(image_path):
raise exception.ImageRefValidationFailed(
image_href=image_href,
reason=_("Specified image file not found."))
return image_path
def download(self, image_href, image_file):
"""Downloads image to specified location.
:param image_href: Image reference.
:param image_file: File object to write data to.
:raises: exception.ImageRefValidationFailed if source image file
doesn't exist.
:raises: exception.ImageDownloadFailed if exceptions were raised while
writing to file or creating hard link.
"""
source_image_path = self.validate_href(image_href)
dest_image_path = image_file.name
local_device = os.stat(dest_image_path).st_dev
try:
# We should have read and write access to source file to create
# hard link to it.
if (local_device == os.stat(source_image_path).st_dev
and os.access(source_image_path, os.R_OK | os.W_OK)):
image_file.close()
os.remove(dest_image_path)
os.link(source_image_path, dest_image_path)
else:
filesize = os.path.getsize(source_image_path)
offset = 0
with open(source_image_path, 'rb') as input_img:
while offset < filesize:
count = min(SENDFILE_CHUNK_SIZE, filesize - offset)
nbytes_out = os.sendfile(image_file.fileno(),
input_img.fileno(),
offset,
count)
offset += nbytes_out
except Exception as e:
raise exception.ImageDownloadFailed(image_href=image_href,
reason=str(e))
def show(self, image_href):
"""Get dictionary of image properties.
:param image_href: Image reference.
:raises: exception.ImageRefValidationFailed if image file specified
doesn't exist.
:returns: dictionary of image properties. It has three of them: 'size',
'updated_at' and 'properties'. 'updated_at' attribute is a naive
UTC datetime object.
"""
source_image_path = self.validate_href(image_href)
return {
'size': os.path.getsize(source_image_path),
'updated_at': utils.unix_file_modification_datetime(
source_image_path),
'properties': {},
# No point in caching local file images
'no_cache': True,
}
protocol_mapping = {
'http': HttpImageService,
'https': HttpImageService,
'file': FileImageService,
'glance': GlanceImageService,
}
def get_image_service(image_href, client=None, context=None):
"""Get image service instance to download the image.
:param image_href: String containing href to get image service for.
:param client: Glance client to be used for download, used only if
image_href is Glance href.
:param context: request context, used only if image_href is Glance href.
:raises: exception.ImageRefValidationFailed if no image service can
handle specified href.
:returns: Instance of an image service class that is able to download
specified image.
"""
scheme = urlparse.urlparse(image_href).scheme.lower()
if not scheme:
if uuidutils.is_uuid_like(str(image_href)):
cls = GlanceImageService
else:
raise exception.ImageRefValidationFailed(
image_href=image_href,
reason=_('Scheme-less image href is not a UUID.'))
else:
cls = protocol_mapping.get(scheme)
if not cls:
raise exception.ImageRefValidationFailed(
image_href=image_href,
reason=_('Image download protocol %s is not supported.'
) % scheme)
if cls == GlanceImageService:
return cls(client, context)
return cls()
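# Illustrative usage sketch (not part of ironic itself): get_image_service()
# dispatches on the href scheme. The URLs below are placeholder examples and
# running this requires an importable ironic tree.
if __name__ == '__main__':  # pragma: no cover
    svc = get_image_service('http://example.com/images/deploy.qcow2')
    print(type(svc).__name__)   # -> HttpImageService
    svc = get_image_service('file:///var/lib/ironic/images/deploy.qcow2')
    print(type(svc).__name__)   # -> FileImageService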
| openstack/ironic | ironic/common/image_service.py | Python | apache-2.0 | 12,149 |
"""
This module contains helper functions for controlling caching. It does so by
managing the "Vary" header of responses. It includes functions to patch the
header of response objects directly and decorators that change functions to do
that header-patching themselves.
For information on the Vary header, see:
https://tools.ietf.org/html/rfc7231#section-7.1.4
Essentially, the "Vary" HTTP header defines which headers a cache should take
into account when building its cache key. Requests with the same path but
different header content for headers named in "Vary" need to get different
cache keys to prevent delivery of wrong content.
An example: i18n middleware would need to distinguish caches by the
"Accept-language" header.
"""
import hashlib
import logging
import re
import time
from django.conf import settings
from django.core.cache import caches
from django.http import HttpResponse, HttpResponseNotModified
from django.utils.encoding import force_bytes, force_text, iri_to_uri
from django.utils.http import (
http_date, parse_etags, parse_http_date_safe, quote_etag,
)
from django.utils.timezone import get_current_timezone_name
from django.utils.translation import get_language
cc_delim_re = re.compile(r'\s*,\s*')
logger = logging.getLogger('django.request')
def patch_cache_control(response, **kwargs):
"""
Patch the Cache-Control header by adding all keyword arguments to it.
The transformation is as follows:
* All keyword parameter names are turned to lowercase, and underscores
are converted to hyphens.
* If the value of a parameter is True (exactly True, not just a
true value), only the parameter name is added to the header.
* All other parameters are added with their value, after applying
str() to it.
"""
def dictitem(s):
t = s.split('=', 1)
if len(t) > 1:
return (t[0].lower(), t[1])
else:
return (t[0].lower(), True)
def dictvalue(t):
if t[1] is True:
return t[0]
else:
return '%s=%s' % (t[0], t[1])
if response.get('Cache-Control'):
cc = cc_delim_re.split(response['Cache-Control'])
cc = dict(dictitem(el) for el in cc)
else:
cc = {}
# If there's already a max-age header but we're being asked to set a new
# max-age, use the minimum of the two ages. In practice this happens when
# a decorator and a piece of middleware both operate on a given view.
if 'max-age' in cc and 'max_age' in kwargs:
kwargs['max_age'] = min(int(cc['max-age']), kwargs['max_age'])
# Allow overriding private caching and vice versa
if 'private' in cc and 'public' in kwargs:
del cc['private']
elif 'public' in cc and 'private' in kwargs:
del cc['public']
for (k, v) in kwargs.items():
cc[k.replace('_', '-')] = v
cc = ', '.join(dictvalue(el) for el in cc.items())
response['Cache-Control'] = cc
def get_max_age(response):
"""
Return the max-age from the response Cache-Control header as an integer,
or None if it wasn't found or wasn't an integer.
"""
if not response.has_header('Cache-Control'):
return
cc = dict(_to_tuple(el) for el in cc_delim_re.split(response['Cache-Control']))
try:
return int(cc['max-age'])
except (ValueError, TypeError, KeyError):
pass
def set_response_etag(response):
if not response.streaming:
response['ETag'] = quote_etag(hashlib.md5(response.content).hexdigest())
return response
def _precondition_failed(request):
logger.warning(
'Precondition Failed: %s', request.path,
extra={
'status_code': 412,
'request': request,
},
)
return HttpResponse(status=412)
def _not_modified(request, response=None):
new_response = HttpResponseNotModified()
if response:
# Preserve the headers required by Section 4.1 of RFC 7232, as well as
# Last-Modified.
for header in ('Cache-Control', 'Content-Location', 'Date', 'ETag', 'Expires', 'Last-Modified', 'Vary'):
if header in response:
new_response[header] = response[header]
# Preserve cookies as per the cookie specification: "If a proxy server
# receives a response which contains a Set-cookie header, it should
# propagate the Set-cookie header to the client, regardless of whether
        # the response was 304 (Not Modified) or 200 (OK)."
# https://curl.haxx.se/rfc/cookie_spec.html
new_response.cookies = response.cookies
return new_response
def get_conditional_response(request, etag=None, last_modified=None, response=None):
# Only return conditional responses on successful requests.
if response and not (200 <= response.status_code < 300):
return response
# Get HTTP request headers.
if_match_etags = parse_etags(request.META.get('HTTP_IF_MATCH', ''))
if_unmodified_since = request.META.get('HTTP_IF_UNMODIFIED_SINCE')
if if_unmodified_since:
if_unmodified_since = parse_http_date_safe(if_unmodified_since)
if_none_match_etags = parse_etags(request.META.get('HTTP_IF_NONE_MATCH', ''))
if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
if if_modified_since:
if_modified_since = parse_http_date_safe(if_modified_since)
# Step 1 of section 6 of RFC 7232: Test the If-Match precondition.
if if_match_etags and not _if_match_passes(etag, if_match_etags):
return _precondition_failed(request)
# Step 2: Test the If-Unmodified-Since precondition.
if (not if_match_etags and if_unmodified_since and
not _if_unmodified_since_passes(last_modified, if_unmodified_since)):
return _precondition_failed(request)
# Step 3: Test the If-None-Match precondition.
if if_none_match_etags and not _if_none_match_passes(etag, if_none_match_etags):
if request.method in ('GET', 'HEAD'):
return _not_modified(request, response)
else:
return _precondition_failed(request)
# Step 4: Test the If-Modified-Since precondition.
if (not if_none_match_etags and if_modified_since and
not _if_modified_since_passes(last_modified, if_modified_since)):
if request.method in ('GET', 'HEAD'):
return _not_modified(request, response)
# Step 5: Test the If-Range precondition (not supported).
# Step 6: Return original response since there isn't a conditional response.
return response
def _if_match_passes(target_etag, etags):
"""
Test the If-Match comparison as defined in section 3.1 of RFC 7232.
"""
if not target_etag:
# If there isn't an ETag, then there can't be a match.
return False
elif etags == ['*']:
# The existence of an ETag means that there is "a current
# representation for the target resource", even if the ETag is weak,
# so there is a match to '*'.
return True
elif target_etag.startswith('W/'):
# A weak ETag can never strongly match another ETag.
return False
else:
# Since the ETag is strong, this will only return True if there's a
# strong match.
return target_etag in etags
def _if_unmodified_since_passes(last_modified, if_unmodified_since):
"""
Test the If-Unmodified-Since comparison as defined in section 3.4 of
RFC 7232.
"""
return last_modified and last_modified <= if_unmodified_since
def _if_none_match_passes(target_etag, etags):
"""
Test the If-None-Match comparison as defined in section 3.2 of RFC 7232.
"""
if not target_etag:
# If there isn't an ETag, then there isn't a match.
return True
elif etags == ['*']:
# The existence of an ETag means that there is "a current
# representation for the target resource", so there is a match to '*'.
return False
else:
# The comparison should be weak, so look for a match after stripping
# off any weak indicators.
target_etag = target_etag.strip('W/')
etags = (etag.strip('W/') for etag in etags)
return target_etag not in etags
def _if_modified_since_passes(last_modified, if_modified_since):
"""
Test the If-Modified-Since comparison as defined in section 3.3 of RFC 7232.
"""
return not last_modified or last_modified > if_modified_since
def patch_response_headers(response, cache_timeout=None):
"""
Add HTTP caching headers to the given HttpResponse: Expires and
Cache-Control.
Each header is only added if it isn't already set.
cache_timeout is in seconds. The CACHE_MIDDLEWARE_SECONDS setting is used
by default.
"""
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
if cache_timeout < 0:
cache_timeout = 0 # Can't have max-age negative
if not response.has_header('Expires'):
response['Expires'] = http_date(time.time() + cache_timeout)
patch_cache_control(response, max_age=cache_timeout)
def add_never_cache_headers(response):
"""
Add headers to a response to indicate that a page should never be cached.
"""
patch_response_headers(response, cache_timeout=-1)
patch_cache_control(response, no_cache=True, no_store=True, must_revalidate=True)
def patch_vary_headers(response, newheaders):
"""
Add (or update) the "Vary" header in the given HttpResponse object.
newheaders is a list of header names that should be in "Vary". Existing
headers in "Vary" aren't removed.
"""
# Note that we need to keep the original order intact, because cache
# implementations may rely on the order of the Vary contents in, say,
# computing an MD5 hash.
if response.has_header('Vary'):
vary_headers = cc_delim_re.split(response['Vary'])
else:
vary_headers = []
# Use .lower() here so we treat headers as case-insensitive.
existing_headers = {header.lower() for header in vary_headers}
additional_headers = [newheader for newheader in newheaders
if newheader.lower() not in existing_headers]
response['Vary'] = ', '.join(vary_headers + additional_headers)
def has_vary_header(response, header_query):
"""
Check to see if the response has a given header name in its Vary header.
"""
if not response.has_header('Vary'):
return False
vary_headers = cc_delim_re.split(response['Vary'])
existing_headers = {header.lower() for header in vary_headers}
return header_query.lower() in existing_headers
def _i18n_cache_key_suffix(request, cache_key):
"""If necessary, add the current locale or time zone to the cache key."""
if settings.USE_I18N or settings.USE_L10N:
# first check if LocaleMiddleware or another middleware added
# LANGUAGE_CODE to request, then fall back to the active language
# which in turn can also fall back to settings.LANGUAGE_CODE
cache_key += '.%s' % getattr(request, 'LANGUAGE_CODE', get_language())
if settings.USE_TZ:
# The datetime module doesn't restrict the output of tzname().
# Windows is known to use non-standard, locale-dependent names.
# User-defined tzinfo classes may return absolutely anything.
# Hence this paranoid conversion to create a valid cache key.
tz_name = force_text(get_current_timezone_name(), errors='ignore')
cache_key += '.%s' % tz_name.encode('ascii', 'ignore').decode('ascii').replace(' ', '_')
return cache_key
def _generate_cache_key(request, method, headerlist, key_prefix):
"""Return a cache key from the headers given in the header list."""
ctx = hashlib.md5()
for header in headerlist:
value = request.META.get(header)
if value is not None:
ctx.update(force_bytes(value))
url = hashlib.md5(force_bytes(iri_to_uri(request.build_absolute_uri())))
cache_key = 'views.decorators.cache.cache_page.%s.%s.%s.%s' % (
key_prefix, method, url.hexdigest(), ctx.hexdigest())
return _i18n_cache_key_suffix(request, cache_key)
def _generate_cache_header_key(key_prefix, request):
"""Return a cache key for the header cache."""
url = hashlib.md5(force_bytes(iri_to_uri(request.build_absolute_uri())))
cache_key = 'views.decorators.cache.cache_header.%s.%s' % (
key_prefix, url.hexdigest())
return _i18n_cache_key_suffix(request, cache_key)
def get_cache_key(request, key_prefix=None, method='GET', cache=None):
"""
Return a cache key based on the request URL and query. It can be used
in the request phase because it pulls the list of headers to take into
account from the global URL registry and uses those to build a cache key
to check against.
If there isn't a headerlist stored, return None, indicating that the page
needs to be rebuilt.
"""
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
headerlist = cache.get(cache_key)
if headerlist is not None:
return _generate_cache_key(request, method, headerlist, key_prefix)
else:
return None
def learn_cache_key(request, response, cache_timeout=None, key_prefix=None, cache=None):
"""
Learn what headers to take into account for some request URL from the
response object. Store those headers in a global URL registry so that
later access to that URL will know what headers to take into account
without building the response object itself. The headers are named in the
Vary header of the response, but we want to prevent response generation.
The list of headers to use for cache key generation is stored in the same
cache as the pages themselves. If the cache ages some data out of the
cache, this just means that we have to build the response once to get at
the Vary header and so at the list of headers to use for the cache key.
"""
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
if response.has_header('Vary'):
is_accept_language_redundant = settings.USE_I18N or settings.USE_L10N
# If i18n or l10n are used, the generated cache key will be suffixed
# with the current locale. Adding the raw value of Accept-Language is
# redundant in that case and would result in storing the same content
# under multiple keys in the cache. See #18191 for details.
headerlist = []
for header in cc_delim_re.split(response['Vary']):
header = header.upper().replace('-', '_')
if header == 'ACCEPT_LANGUAGE' and is_accept_language_redundant:
continue
headerlist.append('HTTP_' + header)
headerlist.sort()
cache.set(cache_key, headerlist, cache_timeout)
return _generate_cache_key(request, request.method, headerlist, key_prefix)
else:
# if there is no Vary header, we still need a cache key
# for the request.build_absolute_uri()
cache.set(cache_key, [], cache_timeout)
return _generate_cache_key(request, request.method, [], key_prefix)
def _to_tuple(s):
t = s.split('=', 1)
if len(t) == 2:
return t[0].lower(), t[1]
return t[0].lower(), True
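# Illustrative usage sketch (not part of Django itself): shows how the Vary and
# Cache-Control helpers above are typically combined. Assumes settings can be
# configured minimally; the output comments are examples.
if __name__ == '__main__':  # pragma: no cover
    settings.configure()
    response = HttpResponse('hello')
    patch_vary_headers(response, ['Accept-Language', 'Cookie'])
    patch_cache_control(response, max_age=300, public=True)
    print(response['Vary'])           # "Accept-Language, Cookie"
    print(response['Cache-Control'])  # e.g. "max-age=300, public"
    print(has_vary_header(response, 'cookie'))  # True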
| uranusjr/django | django/utils/cache.py | Python | bsd-3-clause | 15,727 |
from PyQt4 import QtGui
class BooksListWidget(QtGui.QWidget):
""" Uneditable list's books """
def __init__(self, label):
super(BooksListWidget, self).__init__()
# init label and table widgets
self.title_label = QtGui.QLabel(label)
self.books_list = QtGui.QListView()
        # List settings
        # NOTE: minimumHeight() is a getter, so this call has no effect;
        # setMinimumHeight() would be needed to actually constrain the height.
        self.books_list.minimumHeight()
# Make the list uneditable
self.books_list.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
# Create a model for the list's books
self.model = QtGui.QStringListModel()
# Apply the model to the list view
self.books_list.setModel(self.model)
# Create the layout
self.main_layout = QtGui.QVBoxLayout()
self.main_layout.addWidget(self.title_label)
self.main_layout.addWidget(self.books_list)
# Set the layout
self.setLayout(self.main_layout)
def update_list(self, books_list):
""" Update the books list """
assert isinstance(books_list, list)
self.model.setStringList(books_list)
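if __name__ == '__main__':
    # Minimal manual-test sketch (added for illustration, not part of the
    # original module): display the widget with a few sample book titles.
    import sys
    app = QtGui.QApplication(sys.argv)
    widget = BooksListWidget('Matching books')
    widget.update_list(['Dune', 'Foundation', 'Hyperion'])
    widget.show()
    sys.exit(app.exec_())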
| franramirez688/Taric-Challange | taric_challange/gui/widgets/books_list.py | Python | mit | 1,089 |
# Read N from stdin and print N!; Python's arbitrary-precision integers
# make the "extra long factorial" work without any special handling.
result = 1
for i in range(1, int(input()) + 1):
    result *= i
print(result)
| burakkose/HackerRank | Challenges/extra_long_factorials.py | Python | unlicense | 78 |
"""Utilities relating to data handling and processing.
This module does not define datatypes itself. See L{jazzparser.data}
for that.
"""
"""
============================== License ========================================
Copyright (C) 2008, 2010-12 University of Edinburgh, Mark Granroth-Wilding
This file is part of The Jazz Parser.
The Jazz Parser is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The Jazz Parser is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with The Jazz Parser. If not, see <http://www.gnu.org/licenses/>.
============================ End license ======================================
"""
__author__ = "Mark Granroth-Wilding <mark.granroth-wilding@ed.ac.uk>"
def hold_out(full, start, end):
    """Return a copy of the input sequence with the slice [start:end) removed."""
if start is None or start == 0:
return full[end:]
elif end is None:
return full[:start]
else:
return full[:start] + full[end:]
def holdout_partition(input, partitions):
"""
Partitions the iterable input into the given number of partitions
and returns a list of subsets of the input with each of the
partitions excluded. Useful for doing heldout data evaluations.
"""
partition_size = len(input) / partitions
heldout_sets = []
for partition in range(partitions-1):
heldout_sets.append(hold_out(input, partition_size*partition, partition_size*(partition+1)))
# Last partition: throw in everything that's left
heldout_sets.append(hold_out(input, partition_size*(partitions-1), None))
return heldout_sets
def partition(input, partitions):
"""
The complement of holdout_partition. Simply splits the input
n ways.
"""
partition_size = len(input) / partitions
parts = []
for partition in range(partitions-1):
parts.append(input[partition_size*partition: partition_size*(partition+1)])
# Last partition: throw what's left in
parts.append(input[partition_size*(partitions-1):])
return parts
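if __name__ == '__main__':
    # Small runnable sketch (not in the original module) showing how
    # partition() splits the data and holdout_partition() returns the
    # complementary held-out sets. Assumes Python 2, like the module itself.
    data = list(range(10))
    print(partition(data, 3))
    # -> [[0, 1, 2], [3, 4, 5], [6, 7, 8, 9]]
    print(holdout_partition(data, 3))
    # -> [[3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5]]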
| markgw/jazzparser | src/jazzparser/utils/data.py | Python | gpl-3.0 | 2,389 |
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = patterns('',
url(r'^$', 'docbucket.views.home', name='home'),
url(r'^create/(?P<compiler>.+)$', 'docbucket.views.create', name='create'),
url(r'^list/$', 'docbucket.views.list', name='list'),
url(r'^list/(?P<tag_name>.+)/$', 'docbucket.views.list', name='list-tag'),
url(r'^show/(?P<doc_id>\d+)/$', 'docbucket.views.show', name='show'),
url(r'^search/$', 'docbucket.views.search', name='search'),
url(r'^_thumbnail/file/(?P<identifier>.+)$', 'docbucket.views.thumbnail', {'type': 'file'}, name='thumbnail-file'),
url(r'^_thumbnail/document/(?P<identifier>\d+)$', 'docbucket.views.thumbnail', {'type': 'document'}, name='thumbnail-document'),
url(r'^_document/(?P<doc_id>\d+)', 'docbucket.views.ajax_document', name='ajax_document'),
url(r'^_tags/', 'docbucket.views.ajax_tags', name='ajax_tags')
)
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | NaPs/Docbucket | docbucket/urls.py | Python | mit | 1,082 |
"""Dependency downloader for open_fortran_parser."""
import logging
import os
import pathlib
import platform
import typing as t
import urllib
import wget
_LOG = logging.getLogger(__name__)
def ensure_dependencies(
dependencies: t.Mapping[str, t.Tuple[urllib.parse.ParseResult, pathlib.Path]],
target_dir: pathlib.Path, download: bool = True, silent: bool = False) -> None:
"""Download missing depenedencies."""
if not target_dir.exists():
_LOG.warning('Creating directory "%s"...', target_dir)
os.makedirs(str(target_dir), exist_ok=True)
for dependency, (url_root, filename) in dependencies.items():
path = target_dir.joinpath(filename)
if path.is_file() and not silent:
_LOG.warning('%s is present already.', dependency)
continue
if not download:
_LOG.warning('%s is not present!', dependency)
continue
url = urllib.parse.urlunparse(url_root) + str(filename)
_LOG.warning('Downloading %s from URL "%s" to path "%s"...', dependency, url, path)
wget.download(url, str(path), bar=None if silent else wget.bar_adaptive)
if not silent:
print()
def cleanup_old_dependencies(
        outdated_dependencies, current_dir: pathlib.Path,
        backup_dir: t.Optional[pathlib.Path] = None):
    """Move outdated dependency files to backup_dir, or delete them when no backup_dir is given."""
if backup_dir is not None and not backup_dir.exists():
_LOG.warning('Creating directory "%s"...', backup_dir)
os.makedirs(str(backup_dir), exist_ok=True)
for dependency, filename in outdated_dependencies.items():
path = current_dir.joinpath(filename)
if not path.is_file():
_LOG.debug('%s already does not exist.', dependency)
continue
if backup_dir is None:
_LOG.warning('Deleting %s in path "%s"...', dependency, current_dir)
path.unlink()
else:
_LOG.warning('Moving %s from path "%s" to path "%s"...',
dependency, current_dir, backup_dir)
path.rename(backup_dir.joinpath(filename))
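if __name__ == '__main__':
    # Illustrative sketch only: the URL root and jar name below are
    # placeholders, and download=False means the call merely reports whether
    # the file is already present in the current directory.
    logging.basicConfig(level=logging.WARNING)
    root = urllib.parse.urlparse('https://example.com/downloads/')
    deps = {'example parser jar': (root, pathlib.Path('example-parser.jar'))}
    ensure_dependencies(deps, pathlib.Path('.'), download=False)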
| mbdevpl/open-fortran-parser-xml | open_fortran_parser/dependencies.py | Python | apache-2.0 | 2,086 |
from edmunds.globals import abc, ABC
class BaseDriver(ABC):
"""
The base driver for storage-drivers
"""
def __init__(self, app):
"""
Initiate the instance
:param app: The application
:type app: Edmunds.Application
"""
self._app = app
@abc.abstractmethod
def write_stream(self, path, append=False, prefix=None):
"""
Get a write stream to a certain path
:param path: The path to the file
:type path: str
:param append: Append to the file
:type append: bool
:param prefix: The prefix
:type prefix: str
:return: The write stream
:rtype: Stream
"""
pass
@abc.abstractmethod
def read_stream(self, path, raise_errors=False, prefix=None):
"""
Get a read stream to a certain path
:param path: The path to the file
:type path: str
:param raise_errors: Raise the errors
:type raise_errors: bool
:param prefix: The prefix
:type prefix: str
:return: The read stream
:rtype: Stream
"""
pass
@abc.abstractmethod
def copy(self, path, new_path, raise_errors=False, prefix=None):
"""
Copy a certain path
:param path: The path to the file
:type path: str
:param new_path: The path to the new file
:type new_path: str
:param raise_errors: Raise the errors
:type raise_errors: bool
:param prefix: The prefix
:type prefix: str
:return: Success
:rtype: bool
"""
pass
@abc.abstractmethod
def delete(self, path, raise_errors=False, prefix=None):
"""
Delete a certain path
:param path: The path to the file
:type path: str
:param raise_errors: Raise the errors
:type raise_errors: bool
:param prefix: The prefix
:type prefix: str
:return: Success
:rtype: bool
"""
pass
@abc.abstractmethod
def exists(self, path, prefix=None):
"""
Check if a certain path exists
:param path: The path to the file
:type path: str
:param prefix: The prefix
:type prefix: str
:return: Exists
:rtype: bool
"""
pass
@abc.abstractmethod
def path(self, path, prefix=None):
"""
Get a processed path
:param path: The path to the file
:type path: str
:param prefix: The prefix
:type prefix: str
:return: Absolute path to file
:rtype: str
"""
pass
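# Illustrative sketch (an assumption, not part of Edmunds): a minimal
# local-filesystem driver showing how a concrete class fills in the abstract
# interface above. Real Edmunds storage drivers may differ.
import os
import shutil
class LocalFileDriver(BaseDriver):
    """Store files below a root directory on the local filesystem."""
    def __init__(self, app, root):
        super(LocalFileDriver, self).__init__(app)
        self._root = root
    def path(self, path, prefix=None):
        return os.path.join(self._root, (prefix or '') + path)
    def write_stream(self, path, append=False, prefix=None):
        return open(self.path(path, prefix), 'ab' if append else 'wb')
    def read_stream(self, path, raise_errors=False, prefix=None):
        try:
            return open(self.path(path, prefix), 'rb')
        except (IOError, OSError):
            if raise_errors:
                raise
            return None
    def copy(self, path, new_path, raise_errors=False, prefix=None):
        try:
            shutil.copy(self.path(path, prefix), self.path(new_path, prefix))
            return True
        except (IOError, OSError):
            if raise_errors:
                raise
            return False
    def delete(self, path, raise_errors=False, prefix=None):
        try:
            os.remove(self.path(path, prefix))
            return True
        except (IOError, OSError):
            if raise_errors:
                raise
            return False
    def exists(self, path, prefix=None):
        return os.path.exists(self.path(path, prefix))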
| LowieHuyghe/edmunds | edmunds/storage/drivers/basedriver.py | Python | apache-2.0 | 3,058 |
from __future__ import unicode_literals
from collections import deque
from threading import Thread
from Queue import Queue
import gc
import time
from traceback import print_exc
from django.db import connection
from py4j.java_gateway import JavaGateway
from py4j.protocol import Py4JJavaError
from codebase.models import CodeElementKind, CodeElement, MethodElement,\
ParameterElement, FieldElement
from docutil.progress_monitor import NullProgressMonitor
from codeutil.java_element import clean_java_name
JAVA_PARSER = 'java'
PARSER_WORKER = 4
HIERARCHY_WORKER = 2
class HierarchyWorker(Thread):
'''Worker that adds parents to code elements.'''
def __init__(self, hierarchies, codebase, progress_monitor):
'''
:param hierarchies: queue of [child_fqn, parent_fqn, parent_fqn, ...]
:param codebase:
:param progress_monitor:
'''
Thread.__init__(self)
self.hierarchies = hierarchies
self.progress_monitor = progress_monitor
self.codebase = codebase
self.setDaemon(True)
def run(self):
while True:
hierarchy = self.hierarchies.get()
if hierarchy is None:
# Sentinel value to indicate we are done!
break
child_fqn = hierarchy[0]
child = CodeElement.objects.filter(codebase=self.codebase).\
get(fqn=child_fqn)
for parent_fqn in hierarchy[1:]:
try:
parent_element = CodeElement.objects.\
filter(codebase=self.codebase).get(fqn=parent_fqn)
child.parents.add(parent_element)
except Exception:
# Not found!
# Probably because not in codebase!
pass
self.progress_monitor.work('Parsed {0} hierarchy: {1} parents'.\
format(child_fqn, len(hierarchy) - 1))
self.hierarchies.task_done()
self.hierarchies.task_done()
# Because django does not automatically close a connection created by
# a custom thread...
connection.close()
class CUWorker(Thread):
'''Worker that processes a compilation unit'''
def __init__(self, queue, codebase, hierarchies, gateway,
progress_monitor):
'''
:param queue: queue of (cu, package_code_element, cu_name, work_amount)
where cu is a Java CompilationUnit.
:param codebase:
:param hierarchies: queue of [child_fqn, parent_fqn, parent_fqn, ...]
:param gateway: Py4J gateway
:param progress_monitor:
'''
Thread.__init__(self)
self.setDaemon(True)
self.queue = queue
self.codebase = codebase
# Does not work as expected because if a thread is waiting while being
# a daemon and the last one, it seems that there may be glitches.
#self.daemon = True
self.hierarchies = hierarchies
self.gateway = gateway
self.progress_monitor = progress_monitor
self.class_kind = CodeElementKind.objects.get(kind='class')
self.annotation_kind = CodeElementKind.objects.get(kind='annotation')
self.enumeration_kind = CodeElementKind.objects.get(kind='enumeration')
self.field_kind = CodeElementKind.objects.get(kind='field')
self.method_kind = CodeElementKind.objects.get(kind='method')
self.method_parameter_kind = CodeElementKind.objects.get(
kind='method parameter')
self.annotation_field_kind = CodeElementKind.objects.get(
kind='annotation field')
self.enumeration_value_kind = CodeElementKind.objects.get(
kind='enumeration value')
self.ASTParser = self.gateway.jvm.org.eclipse.jdt.core.dom.ASTParser
self.JLS3 = self.gateway.jvm.org.eclipse.jdt.core.dom.AST.JLS3
self.ast_parser = self.ASTParser.newParser(self.JLS3)
self.IJavaElement = self.gateway.jvm.org.eclipse.jdt.core.IJavaElement
self.Modifier = self.gateway.jvm.org.eclipse.jdt.core.dom.Modifier
def _get_type_bindings(self, cunit):
children = cunit.getChildren()
new_types = []
type_type = self.IJavaElement.TYPE
for child in children:
if child.getElementType() == type_type:
new_types.append(child)
array = self.gateway.new_array(self.IJavaElement, len(new_types))
for i, type_element in enumerate(new_types):
array[i] = type_element
self.ast_parser.setSource(cunit)
bindings = self.ast_parser.createBindings(array, None)
return bindings
def run(self):
while True:
item = self.queue.get()
if item is None:
# Sentinel value to indicate we are done.
break
(cu, package_code_element, cu_name, work_amount) = item
self.progress_monitor.info('Parsing {0}'.format(cu_name))
try:
for type_binding in self._get_type_bindings(cu):
if type_binding is None:
# This is an anonymous class in a .class
continue
self._parse_type(type_binding, package_code_element)
except Exception:
print_exc()
# Useful for Py4J
gc.collect()
self.queue.task_done()
self.progress_monitor.work('Parsed {0}'.format(cu_name),
work_amount)
self.queue.task_done()
# Because django does not automatically close a connection created by
# a custom thread...
connection.close()
def _parse_type(self, type_binding, container_code_element):
if type_binding.isAnonymous():
return
java_element = type_binding.getJavaElement()
(simple_name, fqn) = clean_java_name(type_binding.getQualifiedName())
deprecated = type_binding.isDeprecated()
abstract = self.Modifier.isAbstract(type_binding.getModifiers()) or \
(type_binding.isInterface() and not type_binding.isAnnotation())
type_code_element = CodeElement(codebase=self.codebase,
simple_name=simple_name,
fqn=fqn,
eclipse_handle=java_element.getHandleIdentifier(),
parser=JAVA_PARSER,
deprecated=deprecated,
abstract=abstract)
type_code_element.binding = type_binding
if type_binding.isAnnotation():
type_code_element.kind = self.annotation_kind
elif type_binding.isEnum():
type_code_element.kind = self.enumeration_kind
else:
type_code_element.kind = self.class_kind
type_code_element.save()
type_code_element.containers.add(container_code_element)
self._parse_type_members(type_binding, type_code_element)
self._parse_type_hierarchy(type_binding, type_code_element)
def _parse_type_members(self, type_binding, type_code_element):
for method_binding in type_binding.getDeclaredMethods():
if method_binding.isAnnotationMember():
self._parse_annotation_field(method_binding, type_code_element)
else:
self._parse_method(method_binding, type_code_element)
for field_binding in type_binding.getDeclaredFields():
if field_binding.isEnumConstant():
self._parse_enumeration_value(field_binding, type_code_element)
else:
self._parse_field(field_binding, type_code_element)
for tbinding in type_binding.getDeclaredTypes():
self._parse_type(tbinding, type_code_element)
def _parse_type_hierarchy(self, type_binding, type_code_element):
supertypes = [type_code_element.fqn]
super_class = type_binding.getSuperclass()
        if super_class is not None:
(_, fqn) = clean_java_name(super_class.getQualifiedName())
supertypes.append(fqn)
for interface in type_binding.getInterfaces():
(_, fqn) = clean_java_name(interface.getQualifiedName())
supertypes.append(fqn)
# Save hierarchy for further processing
if len(supertypes) > 1:
self.hierarchies.append(supertypes)
def _parse_method(self, method_binding, container_code_element):
# method header
if self._is_private(method_binding):
return
java_element = method_binding.getJavaElement()
if java_element is None:
# This means that the method was inferred like default
# constructor.
# This is for compatibility with previous recodoc.
return
simple_name = method_binding.getName()
(_, fqn) = clean_java_name(
method_binding.getDeclaringClass().getQualifiedName())
fqn = fqn + '.' + simple_name
parameters = method_binding.getParameterTypes()
try:
parameter_names = java_element.getParameterNames()
except Py4JJavaError:
parameter_names = ["arg" for param in parameters]
params_length = len(parameters)
(return_simple_name, return_fqn) = clean_java_name(
method_binding.getReturnType().getQualifiedName())
deprecated = method_binding.isDeprecated()
type_binding = container_code_element.binding
abstract = self.Modifier.isAbstract(method_binding.getModifiers())\
or (type_binding.isInterface() and
not type_binding.isAnnotation())
method_code_element = MethodElement(codebase=self.codebase,
kind=self.method_kind, simple_name=simple_name,
fqn=fqn,
parameters_length=params_length,
eclipse_handle=java_element.getHandleIdentifier(),
return_simple_name=return_simple_name,
return_fqn=return_fqn,
parser=JAVA_PARSER,
deprecated=deprecated,
abstract=abstract)
# method container
method_code_element.save()
method_code_element.containers.add(container_code_element)
# parse parameters
for i, parameter in enumerate(parameters):
(type_simple_name, type_fqn) = clean_java_name(
parameter.getQualifiedName())
parameter_name = parameter_names[i]
if parameter_name.startswith('arg'):
parameter_name = ''
simple_name = fqn = parameter_name
parameter_code_element = ParameterElement(
codebase=self.codebase,
kind=self.method_parameter_kind,
simple_name=simple_name,
fqn=fqn,
type_simple_name=type_simple_name,
type_fqn=type_fqn,
index=i,
attcontainer=method_code_element,
parser=JAVA_PARSER)
parameter_code_element.save()
# If we ever need to get the deprecated replace
# method.getJavadoc()
# method.tags()
# look for tag.getTagName() == 'deprecated'
# look at subtag link or just plain text...
def _is_private(self, binding):
return self.Modifier.isPrivate(binding.getModifiers())
def _parse_field(self, field_binding, container_code_element):
if not self._is_private(field_binding):
java_element = field_binding.getJavaElement()
simple_name = field_binding.getName()
(_, fqn) = clean_java_name(
field_binding.getDeclaringClass().getQualifiedName())
fqn = fqn + '.' + simple_name
(type_simple_name, type_fqn) = clean_java_name(
field_binding.getType().getQualifiedName())
field_code_element = FieldElement(codebase=self.codebase,
kind=self.field_kind,
simple_name=simple_name,
fqn=fqn,
eclipse_handle=java_element.getHandleIdentifier(),
type_simple_name=type_simple_name,
type_fqn=type_fqn,
parser=JAVA_PARSER)
field_code_element.save()
field_code_element.containers.add(container_code_element)
def _parse_enumeration_value(self, field_binding, container_code_element):
if not self._is_private(field_binding):
java_element = field_binding.getJavaElement()
simple_name = field_binding.getName()
(_, fqn) = clean_java_name(
field_binding.getDeclaringClass().getQualifiedName())
fqn = fqn + '.' + simple_name
(type_simple_name, type_fqn) = clean_java_name(
field_binding.getType().getQualifiedName())
field_code_element = FieldElement(codebase=self.codebase,
kind=self.enumeration_value_kind,
simple_name=simple_name,
fqn=fqn,
eclipse_handle=java_element.getHandleIdentifier(),
type_simple_name=type_simple_name,
type_fqn=type_fqn,
parser=JAVA_PARSER)
field_code_element.save()
field_code_element.containers.add(container_code_element)
def _parse_annotation_field(self, method_binding, container_code_element):
if not self._is_private(method_binding):
java_element = method_binding.getJavaElement()
simple_name = method_binding.getName()
(_, fqn) = clean_java_name(
method_binding.getDeclaringClass().getQualifiedName())
fqn = fqn + '.' + simple_name
(type_simple_name, type_fqn) = clean_java_name(
method_binding.getReturnType().getQualifiedName())
field_code_element = FieldElement(codebase=self.codebase,
kind=self.annotation_field_kind,
simple_name=simple_name,
fqn=fqn,
eclipse_handle=java_element.getHandleIdentifier(),
type_simple_name=type_simple_name,
type_fqn=type_fqn,
attcontainer=container_code_element,
parser=JAVA_PARSER)
field_code_element.save()
field_code_element.containers.add(container_code_element)
class JavaParser(object):
'''Parses a Java codebase and creates the appropriate CodeElement.
This parser uses multiple threads to speed up the parsing. This parser
requires access to Eclipse/Py4J'''
JAVA_SRC_FOLDER = 'src'
def __init__(self, codebase, project_key, opt_input):
'''
:param project_key: The name of the project in the Eclipse workspace.
:param codebase: The codebase instance to which the CodeElement will
be associated with.
:param opt_input: Optional input. Not used by this parser.
'''
self.project_name = project_key
self.gateway = JavaGateway()
        self.hierarchies = deque()  # list of lists: [child_fqn, parent_fqn, parent_fqn, ...]
self.codebase = codebase
self.queue = Queue()
self.package_kind = CodeElementKind.objects.get(kind='package')
if opt_input is None or opt_input.strip() == '' or opt_input == '-1':
self.proot_name = None
self.package_names = None
else:
inputs = opt_input.split(',')
self.proot_name = inputs[0].strip()
self.package_names = inputs[1:]
def _get_package_root(self):
ResourcePlugin = self.gateway.jvm.org.eclipse.core.resources.\
ResourcesPlugin
workspaceRoot = ResourcePlugin.getWorkspace().getRoot()
project = workspaceRoot.getProject(self.project_name)
java_project = self.gateway.jvm.org.eclipse.jdt.core.JavaCore.\
create(project)
if self.proot_name is None:
src_folder = project.getFolder(JavaParser.JAVA_SRC_FOLDER)
proot = java_project.getPackageFragmentRoot(src_folder)
else:
proot = None
for temp_proot in java_project.getAllPackageFragmentRoots():
if temp_proot.getElementName() == self.proot_name:
proot = temp_proot
break
return proot
def _should_filter_package(self, package_name):
if self.package_names is None:
return False
else:
should_keep = False
for pname in self.package_names:
if package_name.startswith(pname):
should_keep = True
break
return not should_keep
def _parse_packages(self, proot):
packages = []
for package in proot.getChildren():
if package.hasChildren():
package_name = package.getElementName()
if self._should_filter_package(package_name):
continue
package_code_element = CodeElement(codebase=self.codebase,
simple_name=package_name, fqn=package_name,
eclipse_handle=package.getHandleIdentifier(),
kind=self.package_kind, parser=JAVA_PARSER)
package_code_element.save()
packages.append((package, package_code_element))
return packages
def _need_class_files(self):
return self.proot_name is not None and self.proot_name.endswith('.jar')
def parse(self, progress_monitor=NullProgressMonitor()):
'''Parses the codebase and creates CodeElement instances.
:progress_monitor: A progress monitor to track the parsing progress.
'''
proot = self._get_package_root()
packages = self._parse_packages(proot)
progress_monitor.start('Parsing Java Project', len(packages))
# Start workers:
for _ in xrange(0, PARSER_WORKER):
worker = CUWorker(self.queue, self.codebase, self.hierarchies,
self.gateway, progress_monitor)
worker.start()
start = time.time()
for (package, package_code_element) in packages:
gc.collect() # for Py4J
if self._need_class_files():
cunits = package.getClassFiles()
else:
cunits = package.getCompilationUnits()
unit_length = float(len(cunits))
for cunit in cunits:
cu_name = cunit.getElementName()
if cu_name.find('$') > -1:
# Do not send internal classes: they will be parsed
# using type_binding.getDeclaredTypes()
continue
winput = (cunit, package_code_element, cu_name,
1.0 / unit_length)
self.queue.put(winput)
for _ in xrange(0, PARSER_WORKER):
self.queue.put(None)
progress_monitor.info('Done parsing packages. Waiting for CUs.')
self.queue.join()
progress_monitor.done()
self.gateway.close()
print('Time: ' + str(time.time() - start))
self.parse_hierarchy(progress_monitor)
def parse_hierarchy(self, progress_monitor=NullProgressMonitor()):
'''Builds the hierarchy of the parsed CodeElement instances.
Must be called *after* parse.
        :param progress_monitor: A progress monitor to track the hierarchy parsing progress.
'''
queue = Queue()
for hierarchy in self.hierarchies:
queue.put(hierarchy)
progress_monitor.start('Parsing Java Hierarchy', len(self.hierarchies))
start = time.time()
for _ in xrange(0, HIERARCHY_WORKER):
# Sentinel value
queue.put(None)
worker = HierarchyWorker(queue, self.codebase, progress_monitor)
worker.start()
queue.join()
progress_monitor.done()
self.gateway.close()
print('Time: ' + str(time.time() - start))
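# Illustrative usage sketch (editor's addition, not part of the original
# module). The project name 'my-project' and this call site are assumptions;
# only parse() and parse_hierarchy() come from the class above.
def _example_parse_java_project(codebase):
    parser = JavaParser(codebase, 'my-project', opt_input=None)
    # parse() spawns the compilation-unit workers, waits for them, and already
    # calls parse_hierarchy() itself, so one call builds elements and hierarchy.
    parser.parse(progress_monitor=NullProgressMonitor())
    return parser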
| bartdag/recodoc2 | recodoc2/apps/codebase/parser/java_code_parser.py | Python | bsd-3-clause | 20,241 |
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from .models import Submission
from .serializers import SubmissionSerializer
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from problem.models import Problem
from django.shortcuts import get_object_or_404
from .forms import SubmissionForm
from django_tables2 import RequestConfig
from .tables import SubmissionTable
# from guardian.shortcuts import get_objects_for_user
class SubmissionViewSet(viewsets.ModelViewSet):
queryset = Submission.objects.all()
serializer_class = SubmissionSerializer
permission_classes = (IsAuthenticated,)
class SubmissionListView(ListView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionListView, self).get_context_data(**kwargs)
submissions_table = SubmissionTable(self.get_queryset())
RequestConfig(self.request).configure(submissions_table)
# add filter here
context['submissions_table'] = submissions_table
return context
class SubmissionDetailView(DetailView):
model = Submission
def get_context_data(self, **kwargs):
context = super(SubmissionDetailView, self).get_context_data(**kwargs)
return context
class SubmissionCreateView(CreateView):
model = Submission
form_class = SubmissionForm
template_name_suffix = '_create_form'
@method_decorator(login_required)
def dispatch(self, request, pid=None, *args, **kwargs):
pid = self.kwargs['pid']
self.problem = get_object_or_404(Problem.objects.all(), pk=pid)
return super(SubmissionCreateView, self).dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
kw = super(SubmissionCreateView, self).get_form_kwargs()
kw['qs'] = self.problem.allowed_lang.all()
return kw
def get_context_data(self, **kwargs):
context = super(SubmissionCreateView, self).get_context_data(**kwargs)
context['problem'] = self.problem
return context
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.problem = self.problem
self.object.user = self.request.user
return super(SubmissionCreateView, self).form_valid(form)
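# Illustrative sketch (editor's addition, not part of this app): get_form_kwargs()
# above passes a `qs` kwarg, so SubmissionForm presumably pops it and uses it to
# restrict the selectable languages. The field names below are assumptions.
from django import forms
class SubmissionFormSketch(forms.ModelForm):
    class Meta:
        model = Submission
        fields = ['language', 'code']  # assumed field names, for illustration only
    def __init__(self, *args, **kwargs):
        qs = kwargs.pop('qs', None)  # queryset of languages allowed for this problem
        super(SubmissionFormSketch, self).__init__(*args, **kwargs)
        if qs is not None:
            self.fields['language'].queryset = qs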
| wangzitian0/BOJ-V4 | submission/views.py | Python | mit | 2,454 |
#!/usr/bin/env python
"""
Simplest demo of using PyGame to play back audio from Python
https://www.scivision.co/playing-sounds-from-numpy-arrays-in-python/
"""
import pygame
from time import sleep
from pyimagevideo import dialtone
fs = 8000 # Hz
x = (dialtone(fs) * 32768).astype('int16') # scale to int16 for sound card
pygame.mixer.pre_init(fs, size=-16, channels=1)
pygame.mixer.init()
sound = pygame.sndarray.make_sound(x)
sound.play()
sleep(0.01) # NOTE: Since sound playback is async, allow sound playback to start before Python exits
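# Illustrative addition (editor's sketch, not in the original script): playback
# is asynchronous, so a caller that must not exit early can poll the mixer
# instead of relying on a fixed sleep().
def play_blocking(snd):
    """Play a pygame Sound object and return only after playback has finished."""
    snd.play()
    while pygame.mixer.get_busy():  # True while any channel is still playing
        sleep(0.05)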
| scienceopen/pyimagevideo | audio_pygame.py | Python | gpl-3.0 | 549 |
from izrazi_cnf import *
#################### examples for CNF, SAT ####################
p = Spr('p')
r = Spr('r')
q = Spr('q')
a = Spr('a')
b = Spr('b')
c = Spr('c')
d = Spr('d')
e = Spr('e')
f = Spr('f')
p1, p1_cnf = Ali(a), a
p2, p2_cnf = In(a,Neg(a)), F()
p3, p3_cnf = Ali(a,Neg(a)), T()
p4, p4_cnf = In(T(),F(),Ali(p,Neg(p))), F()
p5, p5_cnf = Ali(Neg(In(p,r,q))), Ali(Neg(r),Neg(p),Neg(q))
p6, p6_cnf = In(T(),In(p,Neg(p))), F()
p7, p7_cnf = In(a), a
p8, p8_cnf = Ali(p,In(q,p)), In(p,Ali(p,q))
p9, p9_cnf = In(p,Ali(q,Neg(p))), In(p,Ali(q,Neg(p)))
p10, p10_cnf = Ali(Ali(a)), a
p11, p11_cnf = Ali(Ali(a,b),In(Neg(a),b)), Ali(a,b)
p12, p12_cnf = In(Ali(p,q),Ali(p,r)), In(Ali(p,q),Ali(p,r))
p13, p13_cnf = In(In(p,q),In(q,r),In(r,p)), In(p,q,r)
p14, p14_cnf = Ali(In(p,q),In(a,b,Neg(p))), In(Ali(a,p),Ali(a,q),Ali(b,p),Ali(b,q),Ali(Neg(p),q))
p15, p15_cnf = In(Ali(In(p,r),In(a,b)),f), In(f, Ali(a,p),Ali(a,r),Ali(b,p),Ali(b,r))
p16 = Ali(In(a,Ali(Ali(b,d),In(e,f))),In(c,Ali(d,a)),e)
p17 = Ali(F(),Ali(a,b,c),In(Neg(a),e,f,Ali(c,d),T()),Neg(Ali(Neg(e),d)),In(b,f))
#################### sudoku examples ####################
## Unsolvable sudoku
sud1 = [(1,2,8),(1,4,3),(1,5,1),
(2,3,3),(2,6,5),(2,7,4),(2,9,1),
(3,4,7),(3,7,8),(3,8,5),
(4,1,9),(4,2,7),(4,6,2),(4,9,6),
(5,1,4),(5,2,6),(5,3,1),(5,7,7),(5,8,9),(5,9,4),
(6,1,2),(6,4,6),(6,8,8),(6,9,4),
(7,2,4),(7,3,7),(7,6,1),
(8,1,8),(8,3,9),(8,4,2),(8,7,3),
(9,5,7),(9,6,3),(9,8,1)]
## Sudokus with a solution
sud2 = [(1,1,8),(1,5,9),(1,6,3),(1,7,7),(1,9,1),
(2,5,5),(2,7,3),(2,8,6),(2,9,9),
(3,3,5),(3,4,6),(3,5,7),
(4,6,8),(4,9,6),
(5,1,6),(5,2,7),(5,3,4),(5,4,9),(5,6,5),(5,7,2),(5,8,3),(5,9,8),
(6,1,1),(6,4,7),
(7,5,8),(7,6,9),(7,7,5),
(8,1,9),(8,2,1),(8,3,2),(8,5,3),
(9,1,5),(9,3,8),(9,4,4),(9,5,2),(9,9,7)]
sud3 = [(1,2,1),(1,5,6),(1,7,4),(1,8,3),(1,9,9),
(2,5,4),(2,6,3),(2,7,5),
(3,1,4),(3,4,9),(3,5,5),(3,6,1),(3,7,2),(3,8,8),
(4,5,2),(4,7,9),(4,8,5),(4,9,4),
(5,1,6),(5,2,9),(5,5,7),(5,6,4),(5,7,8),(5,8,1),(5,9,3),
(6,1,5),(6,3,3),(6,5,8),
(7,3,8),(7,6,5),
(8,3,1),(8,4,2),
(9,1,9),(9,3,4),(9,5,1),(9,8,7)]
sud4 = [(1,3,6),(1,5,7),(1,6,4),(1,8,8),(1,9,2),
(2,3,9),(2,6,5),(2,9,3),
(4,2,6),(4,4,5),(4,5,3),(4,6,2),
(5,4,1),(5,6,9),
(6,4,7),(6,5,8),(6,6,6),(6,8,2),
(7,1,7),(7,3,5),(7,5,9),(7,6,3),(7,8,6),
(8,1,6),(8,4,8),(8,7,3),
(9,1,1),(9,2,9),(9,4,6),(9,5,2),(9,7,8)]
sud5 = [(1,1,9),(1,2,2),(1,5,1),(1,6,5),
(2,3,5),(2,8,6),
(3,1,6),(3,2,1),(3,4,3),(3,9,4),
(4,1,2),(4,2,8),(4,5,4),
(5,1,1),(5,5,3),(5,9,6),
(6,5,8),(6,8,9),(6,9,5),
(7,1,4),(7,6,9),(7,8,5),(7,9,3),
(8,2,9),(8,7,6),
(9,4,8),(9,5,6),(9,8,4),(9,9,1)]
## Completely filled sudoku
sud6 = [(1,1,5),(1,2,3),(1,3,6),(1,4,9),(1,5,7),(1,6,4),(1,7,1),(1,8,8),(1,9,2),
(2,1,8),(2,2,7),(2,3,9),(2,4,2),(2,5,1),(2,6,5),(2,7,6),(2,8,4),(2,9,3),
(3,1,4),(3,2,2),(3,3,1),(3,4,3),(3,5,6),(3,6,8),(3,7,9),(3,8,7),(3,9,5),
(4,1,9),(4,2,6),(4,3,7),(4,4,5),(4,5,3),(4,6,2),(4,7,4),(4,8,1),(4,9,8),
(5,1,2),(5,2,5),(5,3,8),(5,4,1),(5,5,4),(5,6,9),(5,7,7),(5,8,3),(5,9,6),
(6,1,3),(6,2,1),(6,3,4),(6,4,7),(6,5,8),(6,6,6),(6,7,5),(6,8,2),(6,9,9),
(7,1,7),(7,2,8),(7,3,5),(7,4,4),(7,5,9),(7,6,3),(7,7,2),(7,8,6),(7,9,1),
(8,1,6),(8,2,4),(8,3,2),(8,4,8),(8,5,5),(8,6,1),(8,7,3),(8,8,9),(8,9,7),
(9,1,1),(9,2,9),(9,3,3),(9,4,6),(9,5,2),(9,6,7),(9,7,8),(9,8,5),(9,9,4)]
#################### graph coloring examples ####################
g1 = {'a':{'b'},'b':{'a','c'},'c':{'b','d'},'d':{'c','e'},'e':{'d'}}
g2 = {'a':{'b','f'},'b':{'a','c','f'},'c':{'b','d','f'},'d':{'c','e','f'},'e':{'d','f'},'f':{'a','b','c','d','e'}}
g3 = {'a':{'b','f','g'},'b':{'a','c'},'c':{'b','d','g'},'d':{'c','e'},'e':{'d','f','g'},'f':{'a','e','g'},'g':{'a','c','e','f'}}
g4 = {'a':{'b','c'},'b':{'a','e','f'},'c':{'a','d'},'d':{'c'},'e':{'b'},'f':{'b'}}
g5 = {1:{2,6,7},2:{1,7,5,3},3:{2,4},4:{3,5,6},5:{2,4,6},6:{1,5,4},7:{1,2}}
g6 = {1:{2,3,4,5,6},2:{1,3,4,5,6},3:{1,2,4,5,6},4:{1,2,3,5,6},5:{1,2,3,4,6},6:{1,2,3,4,5}} # complete graph on 6 vertices
g7 = {1:{2,5,6},2:{1,3,7},3:{2,4,8},4:{3,5,9},5:{4,1,10},6:{1,8,9},7:{2,9,10},8:{3,10,6},9:{4,6,7},10:{5,7,8}} # Petersen graph
g8 = {1:{2,4,5},2:{1,3,5,6},3:{2,5,6},4:{1,5},5:{1,2,3,4,6},6:{2,3,5}}
| JanaVi/LVR | primeri.py | Python | apache-2.0 | 4,471 |
"""Constants for the auth module."""
from datetime import timedelta
ACCESS_TOKEN_EXPIRATION = timedelta(minutes=30)
MFA_SESSION_EXPIRATION = timedelta(minutes=5)
GROUP_ID_ADMIN = "system-admin"
GROUP_ID_USER = "system-users"
GROUP_ID_READ_ONLY = "system-read-only"
| Danielhiversen/home-assistant | homeassistant/auth/const.py | Python | apache-2.0 | 267 |
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.compat import NoReverseMatch
class MockObject(object):
def __init__(self, **kwargs):
self._kwargs = kwargs
for key, val in kwargs.items():
setattr(self, key, val)
def __str__(self):
kwargs_str = ', '.join([
'%s=%s' % (key, value)
for key, value in sorted(self._kwargs.items())
])
return '<MockObject %s>' % kwargs_str
class MockQueryset(object):
def __init__(self, iterable):
self.items = iterable
def __getitem__(self, val):
return self.items[val]
def get(self, **lookup):
for item in self.items:
if all([
getattr(item, key, None) == value
for key, value in lookup.items()
]):
return item
raise ObjectDoesNotExist()
class BadType(object):
"""
When used as a lookup with a `MockQueryset`, these objects
will raise a `TypeError`, as occurs in Django when making
queryset lookups with an incorrect type for the lookup value.
"""
def __eq__(self):
raise TypeError()
def mock_reverse(view_name, args=None, kwargs=None, request=None, format=None):
args = args or []
kwargs = kwargs or {}
value = (args + list(kwargs.values()) + ['-'])[0]
prefix = 'http://example.org' if request else ''
suffix = ('.' + format) if (format is not None) else ''
return '%s/%s/%s%s/' % (prefix, view_name, value, suffix)
def fail_reverse(view_name, args=None, kwargs=None, request=None, format=None):
raise NoReverseMatch()
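# Illustrative usage sketch (editor's addition, not part of the original test
# utilities): how the helpers above are typically combined inside a test.
def _example_mock_usage():
    queryset = MockQueryset([
        MockObject(pk=1, name='foo'),
        MockObject(pk=2, name='bar'),
    ])
    obj = queryset.get(pk=2)  # -> the second MockObject
    url = mock_reverse('user-detail', kwargs={'pk': obj.pk}, request=None, format='json')
    return obj, url  # url == '/user-detail/2.json/'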
| atombrella/django-rest-framework | tests/utils.py | Python | bsd-2-clause | 1,646 |
# Roundware Server is released under the GNU Affero General Public License v3.
# See COPYRIGHT.txt, AUTHORS.txt, and LICENSE.txt in the project root directory.
from __future__ import unicode_literals
from django.conf.urls import patterns, url, include
from roundware.api1 import views
import logging
logger = logging.getLogger(__name__)
urlpatterns = patterns('',
# V1 API
url(r'^$', 'api1.views.operations'),
url(r'^auth/', include('rest_framework.urls',
namespace='rest_framework')),
# V1 DRF API - V1 is partially REST.
url(r'^rest/$', views.APIRootView.as_view()),
url(r'^rest/asset/$', views.AssetList.as_view(),
name='api1-asset'),
url(r'^rest/assetlocation/$', views.AssetLocationList.as_view(),
name='api1-assetlocation'),
url(r'^rest/assetlocation/(?P<pk>[0-9]+)/$',
views.AssetLocationDetail.as_view(),
name='api1-assetlocation-detail'),
url(r'^rest/project/$', views.ProjectList.as_view(),
name='api1-project'),
url(r'^rest/event/$', views.EventList.as_view(),
name='api1-event'),
url(r'^rest/session/$', views.SessionList.as_view(),
name='api1-session'),
url(r'^rest/listeninghistoryitem/$',
views.ListeningHistoryItemList.as_view(),
name='api1-listeninghistoryitem'),
)
| yangjackascd/roundware-server | roundware/api1/urls.py | Python | agpl-3.0 | 1,338 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import factory
reload(factory)
from factory import *
def pyethapp_factory(branch='master'):
factory = BuildFactory()
for step in [
Git(
haltOnFailure=True,
logEnviron=False,
repourl='https://github.com/ethereum/pyethapp.git',
branch=branch,
mode='full',
method='copy',
codebase='pyethapp',
retry=(5, 3)
),
SetPropertyFromCommand(
haltOnFailure=True,
logEnviron=False,
name="set-version",
command='sed -ne "s/.*version=.*[^0-9]\([0-9]*\.[0-9]*\.[0-9]*\).*/\\1/p" setup.py',
property="version"
),
ShellCommand(
haltOnFailure=True,
logEnviron=False,
name="pip-requirements",
description="installing requirements",
descriptionDone="install requirements",
command=["pip", "install", "-r", "requirements.txt"]
),
ShellCommand(
haltOnFailure=True,
logEnviron=False,
name="upgrade-requirements",
description="upgrading test requirements",
descriptionDone="upgrade test requirements",
command=["pip", "install", "--upgrade", "--no-deps", "-r", "requirements.txt"]
),
ShellCommand(
haltOnFailure=True,
logEnviron=False,
name="pip-install",
description="installing",
descriptionDone="install",
command=["pip", "install", "-e", "."]
),
ShellCommand(
logEnviron=False,
description="running",
descriptionDone="run",
name="pyethapp",
command=["pyethapp", "--help"]
)
]: factory.addStep(step)
return factory
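# Illustrative sketch (editor's addition, not taken from this repository): in a
# buildbot master configuration the factory above would typically be attached
# to a builder, roughly like this. The builder and slave names are assumptions,
# and the BuilderConfig import reflects the buildbot 0.8.x API of this era.
def _example_builder():
    from buildbot.config import BuilderConfig
    return BuilderConfig(name='pyethapp-master',
                         slavenames=['slave1'],
                         factory=pyethapp_factory(branch='master'))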
| vaporry/ethereum-buildbot | factories/pyethapp.py | Python | mit | 1,885 |
# -*- coding: utf-8 -*-
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
mod_python->WSGI Framework utilities
This code has been taken from the original mod_python source code and rearranged
here to ease the migration from mod_python to WSGI.
The code taken from mod_python is under the following License.
"""
# Copyright 2004 Apache Software Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# Originally developed by Gregory Trubetskoy.
#
# $Id: apache.py 468216 2006-10-27 00:54:12Z grahamd $
from wsgiref.headers import Headers
import time
import re
import os
import cgi
import cStringIO
import tempfile
from types import TypeType, ClassType, BuiltinFunctionType, MethodType, ListType
from invenio.config import CFG_TMPDIR, CFG_TMPSHAREDDIR
from invenio.webinterface_handler_config import \
SERVER_RETURN, \
HTTP_LENGTH_REQUIRED, \
HTTP_BAD_REQUEST, \
InvenioWebInterfaceWSGIContentLenghtError, \
InvenioWebInterfaceWSGIContentTypeError, \
InvenioWebInterfaceWSGIContentMD5Error
class table(Headers):
add = Headers.add_header
iteritems = Headers.items
def __getitem__(self, name):
ret = Headers.__getitem__(self, name)
if ret is None:
return ''
else:
return str(ret)
## Some functions made public
exists_config_define = lambda dummy: True
## Some constants
class metaCookie(type):
def __new__(cls, clsname, bases, clsdict):
_valid_attr = (
"version", "path", "domain", "secure",
"comment", "expires", "max_age",
# RFC 2965
"commentURL", "discard", "port",
# Microsoft Extension
"httponly" )
# _valid_attr + property values
# (note __slots__ is a new Python feature, it
# prevents any other attribute from being set)
__slots__ = _valid_attr + ("name", "value", "_value",
"_expires", "__data__")
clsdict["_valid_attr"] = _valid_attr
clsdict["__slots__"] = __slots__
def set_expires(self, value):
if type(value) == type(""):
# if it's a string, it should be
# valid format as per Netscape spec
try:
t = time.strptime(value, "%a, %d-%b-%Y %H:%M:%S GMT")
except ValueError:
raise ValueError, "Invalid expires time: %s" % value
t = time.mktime(t)
else:
# otherwise assume it's a number
# representing time as from time.time()
t = value
value = time.strftime("%a, %d-%b-%Y %H:%M:%S GMT",
time.gmtime(t))
self._expires = "%s" % value
def get_expires(self):
return self._expires
clsdict["expires"] = property(fget=get_expires, fset=set_expires)
return type.__new__(cls, clsname, bases, clsdict)
class Cookie(object):
"""
This class implements the basic Cookie functionality. Note that
unlike the Python Standard Library Cookie class, this class represents
a single cookie (not a list of Morsels).
"""
__metaclass__ = metaCookie
DOWNGRADE = 0
IGNORE = 1
EXCEPTION = 3
def parse(Class, str, **kw):
"""
Parse a Cookie or Set-Cookie header value, and return
a dict of Cookies. Note: the string should NOT include the
header name, only the value.
"""
dict = _parse_cookie(str, Class, **kw)
return dict
parse = classmethod(parse)
def __init__(self, name, value, **kw):
"""
This constructor takes at least a name and value as the
arguments, as well as optionally any of allowed cookie attributes
as defined in the existing cookie standards.
"""
self.name, self.value = name, value
for k in kw:
setattr(self, k.lower(), kw[k])
# subclasses can use this for internal stuff
self.__data__ = {}
def __str__(self):
"""
Provides the string representation of the Cookie suitable for
sending to the browser. Note that the actual header name will
not be part of the string.
This method makes no attempt to automatically double-quote
strings that contain special characters, even though the RFC's
dictate this. This is because doing so seems to confuse most
browsers out there.
"""
result = ["%s=%s" % (self.name, self.value)]
# pylint: disable=E1101
# The attribute _valid_attr is provided by the metaclass 'metaCookie'.
for name in self._valid_attr:
if hasattr(self, name):
if name in ("secure", "discard", "httponly"):
result.append(name)
else:
result.append("%s=%s" % (name, getattr(self, name)))
# pylint: enable=E1101
return "; ".join(result)
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__,
str(self))
# This is a simplified and in some places corrected
# (at least I think it is) pattern from standard lib Cookie.py
_cookiePattern = re.compile(
r"(?x)" # Verbose pattern
r"[,\ ]*" # space/comma (RFC2616 4.2) before attr-val is eaten
r"(?P<key>" # Start of group 'key'
r"[^;\ =]+" # anything but ';', ' ' or '='
r")" # End of group 'key'
r"\ *(=\ *)?" # a space, then may be "=", more space
r"(?P<val>" # Start of group 'val'
r'"(?:[^\\"]|\\.)*"' # a doublequoted string
r"|" # or
r"[^;]*" # any word or empty string
r")" # End of group 'val'
r"\s*;?" # probably ending in a semi-colon
)
def _parse_cookie(str, Class, names=None):
# XXX problem is we should allow duplicate
# strings
result = {}
matchIter = _cookiePattern.finditer(str)
for match in matchIter:
key, val = match.group("key"), match.group("val")
        # We just ditch cookie names which start with a dollar sign since
        # those are in fact RFC2965 cookie attributes. See bug [#MODPYTHON-3].
        if key[0] != '$' and (names is None or key in names):
result[key] = Class(key, val)
return result
def get_cookies(req, Class=Cookie, **kw):
"""
A shorthand for retrieveing and parsing cookies given
a Cookie class. The class must be one of the classes from
this module.
"""
if not req.headers_in.has_key("cookie"):
return {}
cookies = req.headers_in["cookie"]
if type(cookies) == type([]):
cookies = '; '.join(cookies)
return Class.parse(cookies, **kw)
def get_cookie(req, name, Class=Cookie, **kw):
cookies = get_cookies(req, Class, names=[name], **kw)
if cookies.has_key(name):
return cookies[name]
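def _example_parse_cookies():
    # Illustrative sketch added by the editor, not part of the original
    # mod_python code: Cookie.parse() turns a raw "Cookie:" header value into
    # a dict keyed by cookie name, each value being a Cookie instance.
    cookies = Cookie.parse('session=abc123; lang=en')
    return cookies['session'].value, cookies['lang'].value  # ('abc123', 'en')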
parse_qs = cgi.parse_qs
parse_qsl = cgi.parse_qsl
# Maximum line length for reading. (64KB)
# Fixes memory error when upload large files such as 700+MB ISOs.
readBlockSize = 65368
""" The classes below are a (almost) a drop-in replacement for the
standard cgi.py FieldStorage class. They should have pretty much the
same functionality.
These classes differ in that unlike cgi.FieldStorage, they are not
recursive. The class FieldStorage contains a list of instances of
Field class. Field class is incapable of storing anything in it.
These objects should be considerably faster than the ones in cgi.py
because they do not expect CGI environment, and are
optimized specifically for Apache and mod_python.
"""
class Field:
def __init__(self, name, *args, **kwargs):
self.name = name
# Some third party packages such as Trac create
# instances of the Field object and insert it
# directly into the list of form fields. To
# maintain backward compatibility check for
# where more than just a field name is supplied
# and invoke an additional initialisation step
# to process the arguments. Ideally, third party
# code should use the add_field() method of the
# form, but if they need to maintain backward
# compatibility with older versions of mod_python
        # they will not have a choice but to use the old
        # way of doing things and thus we need this code
        # for the foreseeable future to cope with that.
if args or kwargs:
self.__bc_init__(*args, **kwargs)
def __bc_init__(self, file, ctype, type_options,
disp, disp_options, headers = {}):
self.file = file
self.type = ctype
self.type_options = type_options
self.disposition = disp
self.disposition_options = disp_options
if disp_options.has_key("filename"):
self.filename = disp_options["filename"]
else:
self.filename = None
self.headers = headers
def __repr__(self):
"""Return printable representation."""
return "Field(%s, %s)" % (`self.name`, `self.value`)
def __getattr__(self, name):
if name != 'value':
raise AttributeError, name
if self.file:
self.file.seek(0)
value = self.file.read()
self.file.seek(0)
else:
value = None
return value
def __del__(self):
self.file.close()
class StringField(str):
""" This class is basically a string with
added attributes for compatibility with std lib cgi.py. Basically, this
works the opposite of Field, as it stores its data in a string, but creates
a file on demand. Field creates a value on demand and stores data in a file.
"""
filename = None
headers = {}
ctype = "text/plain"
type_options = {}
disposition = None
disp_options = None
def __new__(cls, value):
'''Create StringField instance. You'll have to set name yourself.'''
obj = str.__new__(cls, value)
obj.value = value
return obj
def __str__(self):
return str.__str__(self)
def __getattr__(self, name):
if name != 'file':
raise AttributeError, name
self.file = cStringIO.StringIO(self.value)
return self.file
def __repr__(self):
"""Return printable representation (to pass unit tests)."""
return "Field(%s, %s)" % (`self.name`, `self.value`)
class FieldList(list):
def __init__(self):
self.__table = None
list.__init__(self)
def table(self):
if self.__table is None:
self.__table = {}
for item in self:
if item.name in self.__table:
self.__table[item.name].append(item)
else:
self.__table[item.name] = [item]
return self.__table
def __delitem__(self, *args):
self.__table = None
return list.__delitem__(self, *args)
def __delslice__(self, *args):
self.__table = None
return list.__delslice__(self, *args)
def __iadd__(self, *args):
self.__table = None
return list.__iadd__(self, *args)
def __imul__(self, *args):
self.__table = None
return list.__imul__(self, *args)
def __setitem__(self, *args):
self.__table = None
return list.__setitem__(self, *args)
def __setslice__(self, *args):
self.__table = None
return list.__setslice__(self, *args)
def append(self, *args):
self.__table = None
return list.append(self, *args)
def extend(self, *args):
self.__table = None
return list.extend(self, *args)
def insert(self, *args):
self.__table = None
return list.insert(self, *args)
def pop(self, *args):
self.__table = None
return list.pop(self, *args)
def remove(self, *args):
self.__table = None
return list.remove(self, *args)
class FieldStorage:
def __init__(self, req, keep_blank_values=0, strict_parsing=0, file_callback=None, field_callback=None, to_tmp_shared=False):
#
# Whenever readline is called ALWAYS use the max size EVEN when
# not expecting a long line. - this helps protect against
# malformed content from exhausting memory.
#
self.list = FieldList()
self.wsgi_input_consumed = False
# always process GET-style parameters
if req.args:
pairs = parse_qsl(req.args, keep_blank_values)
for pair in pairs:
self.add_field(pair[0], pair[1])
if req.method != "POST":
return
try:
clen = int(req.headers_in["content-length"])
except (KeyError, ValueError):
# absent content-length is not acceptable
raise SERVER_RETURN, HTTP_LENGTH_REQUIRED
self.clen = clen
self.count = 0
if not req.headers_in.has_key("content-type"):
ctype = "application/x-www-form-urlencoded"
else:
ctype = req.headers_in["content-type"]
if ctype.startswith("application/x-www-form-urlencoded"):
pairs = parse_qsl(req.read(clen), keep_blank_values)
self.wsgi_input_consumed = True
for pair in pairs:
self.add_field(pair[0], pair[1])
return
elif not ctype.startswith("multipart/"):
# we don't understand this content-type
return
self.wsgi_input_consumed = True
# figure out boundary
try:
i = ctype.lower().rindex("boundary=")
boundary = ctype[i+9:]
if len(boundary) >= 2 and boundary[0] == boundary[-1] == '"':
boundary = boundary[1:-1]
boundary = re.compile("--" + re.escape(boundary) + "(--)?\r?\n")
except ValueError:
raise SERVER_RETURN, HTTP_BAD_REQUEST
# read until boundary
self.read_to_boundary(req, boundary, None)
end_of_stream = False
while not end_of_stream and not self.eof(): # jjj JIM BEGIN WHILE
## parse headers
ctype, type_options = "text/plain", {}
disp, disp_options = None, {}
headers = table([])
line = req.readline(readBlockSize)
self.count += len(line)
if self.eof():
end_of_stream = True
match = boundary.match(line)
if (not line) or match:
# we stop if we reached the end of the stream or a stop
# boundary (which means '--' after the boundary) we
# continue to the next part if we reached a simple
# boundary in either case this would mean the entity is
# malformed, but we're tolerating it anyway.
end_of_stream = (not line) or (match.group(1) is not None)
continue
skip_this_part = False
while line not in ('\r','\r\n'):
nextline = req.readline(readBlockSize)
self.count += len(nextline)
if self.eof():
end_of_stream = True
while nextline and nextline[0] in [ ' ', '\t']:
line = line + nextline
nextline = req.readline(readBlockSize)
self.count += len(nextline)
if self.eof():
end_of_stream = True
# we read the headers until we reach an empty line
# NOTE : a single \n would mean the entity is malformed, but
# we're tolerating it anyway
h, v = line.split(":", 1)
headers.add(h, v)
h = h.lower()
if h == "content-disposition":
disp, disp_options = parse_header(v)
elif h == "content-type":
ctype, type_options = parse_header(v)
#
# NOTE: FIX up binary rubbish sent as content type
# from Microsoft IE 6.0 when sending a file which
# does not have a suffix.
#
if ctype.find('/') == -1:
ctype = 'application/octet-stream'
line = nextline
match = boundary.match(line)
if (not line) or match:
# we stop if we reached the end of the stream or a
# stop boundary (which means '--' after the
# boundary) we continue to the next part if we
# reached a simple boundary in either case this
# would mean the entity is malformed, but we're
# tolerating it anyway.
skip_this_part = True
end_of_stream = (not line) or (match.group(1) is not None)
break
if skip_this_part:
continue
if disp_options.has_key("name"):
name = disp_options["name"]
else:
name = None
# create a file object
# is this a file?
if disp_options.has_key("filename"):
if file_callback and callable(file_callback):
file = file_callback(disp_options["filename"])
else:
if to_tmp_shared:
file = tempfile.NamedTemporaryFile(dir=CFG_TMPSHAREDDIR)
else:
file = tempfile.NamedTemporaryFile(dir=CFG_TMPDIR)
else:
if field_callback and callable(field_callback):
file = field_callback()
else:
file = cStringIO.StringIO()
# read it in
self.read_to_boundary(req, boundary, file)
if self.eof():
end_of_stream = True
file.seek(0)
# make a Field
if disp_options.has_key("filename"):
field = Field(name)
field.filename = disp_options["filename"]
else:
field = StringField(file.read())
field.name = name
field.file = file
field.type = ctype
field.type_options = type_options
field.disposition = disp
field.disposition_options = disp_options
field.headers = headers
self.list.append(field)
def add_field(self, key, value):
"""Insert a field as key/value pair"""
item = StringField(value)
item.name = key
self.list.append(item)
def __setitem__(self, key, value):
table = self.list.table()
if table.has_key(key):
items = table[key]
for item in items:
self.list.remove(item)
item = StringField(value)
item.name = key
self.list.append(item)
def read_to_boundary(self, req, boundary, file):
previous_delimiter = None
while not self.eof():
line = req.readline(readBlockSize)
self.count += len(line)
if not line:
# end of stream
if file is not None and previous_delimiter is not None:
file.write(previous_delimiter)
return True
match = boundary.match(line)
if match:
# the line is the boundary, so we bail out
# if the two last chars are '--' it is the end of the entity
return match.group(1) is not None
if line[-2:] == '\r\n':
# the line ends with a \r\n, which COULD be part
# of the next boundary. We write the previous line delimiter
# then we write the line without \r\n and save it for the next
# iteration if it was not part of the boundary
if file is not None:
if previous_delimiter is not None: file.write(previous_delimiter)
file.write(line[:-2])
previous_delimiter = '\r\n'
elif line[-1:] == '\r':
# the line ends with \r, which is only possible if
# readBlockSize bytes have been read. In that case the
# \r COULD be part of the next boundary, so we save it
# for the next iteration
assert len(line) == readBlockSize
if file is not None:
if previous_delimiter is not None: file.write(previous_delimiter)
file.write(line[:-1])
previous_delimiter = '\r'
elif line == '\n' and previous_delimiter == '\r':
                # the line is a single \n and we were in the middle of a \r\n,
# so we complete the delimiter
previous_delimiter = '\r\n'
else:
if file is not None:
if previous_delimiter is not None: file.write(previous_delimiter)
file.write(line)
previous_delimiter = None
def eof(self):
return self.clen <= self.count
def __getitem__(self, key):
"""Dictionary style indexing."""
found = self.list.table()[key]
if len(found) == 1:
return found[0]
else:
return found
def get(self, key, default):
try:
return self.__getitem__(key)
except (TypeError, KeyError):
return default
def keys(self):
"""Dictionary style keys() method."""
return self.list.table().keys()
def __iter__(self):
return iter(self.keys())
def __repr__(self):
return repr(self.list.table())
def has_key(self, key):
"""Dictionary style has_key() method."""
return (key in self.list.table())
__contains__ = has_key
def __len__(self):
"""Dictionary style len(x) support."""
return len(self.list.table())
def getfirst(self, key, default=None):
""" return the first value received """
try:
return self.list.table()[key][0]
except KeyError:
return default
def getlist(self, key):
""" return a list of received values """
try:
return self.list.table()[key]
except KeyError:
return []
def items(self):
"""Dictionary-style items(), except that items are returned in the same
order as they were supplied in the form."""
return [(item.name, item) for item in self.list]
def __delitem__(self, key):
table = self.list.table()
values = table[key]
for value in values:
self.list.remove(value)
def clear(self):
self.list = FieldList()
def parse_header(line):
"""Parse a Content-type like header.
Return the main content-type and a dictionary of options.
"""
plist = map(lambda a: a.strip(), line.split(';'))
key = plist[0].lower()
del plist[0]
pdict = {}
for p in plist:
i = p.find('=')
if i >= 0:
name = p[:i].strip().lower()
value = p[i+1:].strip()
if len(value) >= 2 and value[0] == value[-1] == '"':
value = value[1:-1]
pdict[name] = value
return key, pdict
def apply_fs_data(object, fs, **args):
"""
Apply FieldStorage data to an object - the object must be
callable. Examine the args, and match then with fs data,
then call the object, return the result.
"""
# we need to weed out unexpected keyword arguments
# and for that we need to get a list of them. There
# are a few options for callable objects here:
fc = None
expected = []
if hasattr(object, "func_code"):
# function
fc = object.func_code
expected = fc.co_varnames[0:fc.co_argcount]
elif hasattr(object, 'im_func'):
# method
fc = object.im_func.func_code
expected = fc.co_varnames[1:fc.co_argcount]
elif type(object) in (TypeType,ClassType):
# class
fc = object.__init__.im_func.func_code
expected = fc.co_varnames[1:fc.co_argcount]
elif type(object) is BuiltinFunctionType:
# builtin
fc = None
expected = []
elif hasattr(object, '__call__'):
# callable object
if type(object.__call__) is MethodType:
fc = object.__call__.im_func.func_code
expected = fc.co_varnames[1:fc.co_argcount]
else:
# abuse of objects to create hierarchy
return apply_fs_data(object.__call__, fs, **args)
# add form data to args
for field in fs.list:
if field.filename:
val = field
else:
val = field.value
args.setdefault(field.name, []).append(val)
# replace lists with single values
for arg in args:
if ((type(args[arg]) is ListType) and
(len(args[arg]) == 1)):
args[arg] = args[arg][0]
# remove unexpected args unless co_flags & 0x08,
# meaning function accepts **kw syntax
if fc is None:
args = {}
elif not (fc.co_flags & 0x08):
for name in args.keys():
if name not in expected:
del args[name]
return object(**args)
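def _example_apply_fs_data():
    # Illustrative sketch (editor's addition, not part of the original module):
    # apply_fs_data() matches submitted form fields against the handler's
    # signature and drops anything the handler does not accept (unless the
    # handler takes **kwargs). A minimal stand-in replaces a real FieldStorage.
    def handler(name, age=None):
        return name, age
    class _FakeStorage:  # stand-in carrying only the .list attribute used above
        pass
    fs = _FakeStorage()
    fs.list = FieldList()
    for key, value in (('name', 'alice'), ('age', '42'), ('ignored', 'x')):
        item = StringField(value)
        item.name = key
        fs.list.append(item)
    return apply_fs_data(handler, fs)  # -> ('alice', '42'); 'ignored' is dropped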
RE_CDISPOSITION_FILENAME = re.compile(r'filename=(?P<filename>[\w\.]*)')
def handle_file_post(req, allowed_mimetypes=None):
"""
Handle the POST of a file.
@return: the a tuple with the full path to the file saved on disk,
and it's mimetype as provided by the request.
@rtype: (string, string)
"""
from invenio.bibdocfile import decompose_file, md5
## We retrieve the length
clen = req.headers_in["Content-Length"]
if clen is None:
raise InvenioWebInterfaceWSGIContentLenghtError("Content-Length header is missing")
try:
clen = int(clen)
assert (clen > 1)
except (ValueError, AssertionError):
raise InvenioWebInterfaceWSGIContentLenghtError("Content-Length header should contain a positive integer")
## Let's take the content type
ctype = req.headers_in["Content-Type"]
if allowed_mimetypes and ctype not in allowed_mimetypes:
raise InvenioWebInterfaceWSGIContentTypeError("Content-Type not in allowed list of content types: %s" % allowed_mimetypes)
## Let's optionally accept a suggested filename
suffix = prefix = ''
g = RE_CDISPOSITION_FILENAME.search(req.headers_in.get("Content-Disposition", ""))
if g:
dummy, prefix, suffix = decompose_file(g.group("filename"))
## Let's optionally accept an MD5 hash (and use it later for comparison)
cmd5 = req.headers_in["Content-MD5"]
if cmd5:
the_md5 = md5()
## Ok. We can initialize the file
fd, path = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=CFG_TMPDIR)
the_file = os.fdopen(fd, 'w')
## Let's read the file
while True:
chunk = req.read(min(10240, clen))
if len(chunk) < min(10240, clen):
## We expected to read at least clen (which is different than 0)
## but chunk was shorter! Gosh! Error! Panic!
the_file.close()
os.close(fd)
os.remove(path)
raise InvenioWebInterfaceWSGIContentLenghtError("File shorter than what specified in Content-Length")
if cmd5:
## MD5 was in the header let's compute it
the_md5.update(chunk)
## And let's definitively write the content to disk :-)
the_file.write(chunk)
clen -= len(chunk)
if clen == 0:
## That's it. Everything was read.
break
if cmd5 and the_md5.hexdigest().lower() != cmd5.strip().lower():
## Let's check the MD5
the_file.close()
os.close(fd)
os.remove(path)
raise InvenioWebInterfaceWSGIContentMD5Error("MD5 checksum does not match")
## Let's clean everything up
the_file.close()
return (path, ctype)
| Dziolas/invenio | modules/webstyle/lib/webinterface_handler_wsgi_utils.py | Python | gpl-2.0 | 29,713 |
##
## File: user_stats.py
##
## Author: Schuyler Martin <sam8050@rit.edu>
##
## Description: Python class that defines a datagram for storing statistics
## on users
##
from datagrams.datagram import Datagram
class UserStats(Datagram):
'''
Class for storing statistics on a user
'''
def __init__(self, uid="", init_map=None):
'''
        UserStats constructor, uses optional named parameters
        :param uid: UID of the user that these stats belong to
        :param init_map: Dictionary that maps class attributes to values.
This map, if it is passed in, will replace all attributes that
are seen in the dictionary. This is how we load an object from
JSON in the DB
'''
super().__init__(uid, init_map)
# number of questions a student has asked or a tutor has answered
self.q_count = 0
# number of times logged into the system
self.login_count = 0
# override attributes in the map
if (init_map != None):
if ("q_count" in init_map):
self.q_count = init_map["q_count"]
if ("login_count" in init_map):
self.login_count = init_map["login_count"]
def __str__(self):
'''
Converts to a string equivalent
'''
title = "User Stats for " + self.uid + "\n"
return title + super().__str__()
def stat_count(self, var_name):
'''
Returns the stats measure of a specific variable
        :param var_name: Variable to fetch
:return: Current variable count
'''
return self.__dict__[var_name]
def stat_increment(self, var_name, value=1):
'''
Increments the stats measure of a specific variable
        :param var_name: Variable to increment
        :param value: Optional parameter, how much to increment by
:return: Current variable count
'''
self.__dict__[var_name] += value
return self.stat_count(var_name)
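# Illustrative usage sketch (editor's addition, not part of the original
# module), assuming the Datagram base class needs nothing beyond the uid here.
def _example_user_stats():
    stats = UserStats(uid="tutor-42")
    stats.stat_increment("login_count")       # login_count -> 1
    stats.stat_increment("q_count", value=3)  # q_count -> 3
    return stats.stat_count("q_count")        # -> 3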
| RIT-CS-Mentoring-Center-Queueing/mmcga_project | server/datagrams/user_stats.py | Python | mit | 2,044 |
from .core import *
from .viz import *
__version__ = "0.0.5"
| nmarincic/numbasom | numbasom/__init__.py | Python | mit | 61 |
#---------------------------------------------------------------------------
# Introduction to Computer Programming (Introdução a Programação de Computadores) - IPC
# Amazonas State University (Universidade do Estado do Amazonas) - UEA
# Prof. Jucimar Jr
#
# Adham Lucas da Silva Oliveira 1715310059
# Gabriel Barroso da Silva Lima 1715310011
# Guilherme Silva de Oliveira 1715310034
# Natália Cavalcante Xavier 1715310021
# Tiago Ferreira Aranha 1715310047
#
# Write an algorithm in Python that:
# a) Reads the integer value n (n ≤ 1000) and the n values of a compound variable A of
# numeric values, ordered in ascending order;
# b) Determines and prints, for each number that repeats in the set, the number of
# times it appears repeated;
# c) Removes the repeated elements, forming a new set;
# d) Prints the set obtained in item c.
#---------------------------------------------------------------------------
n = int(input("digite a quantidade de números: "))
digitados = []
repete = 0
for i in range (0, n):
numero = int(input("digite o número: "))
digitados.append(numero)
i = 1
digitados.append(digitados[0]-1)
while i < n:
while digitados[i] == digitados[i-1]:
repete += 1
        digitados.pop(i)  # remove the duplicate element at index i
n -= 1
else:
if repete > 0:
print ("número", digitados[i-1], "repete", repete, "vez(es)")
repete = 0
i += 1
digitados.remove(digitados[0]-1)
for i in range (0, n):
    print (digitados[i])
 | jucimarjr/IPC_2017-1 | lista06/lista06_lista02_questao23.py | Python | apache-2.0 | 1,566 |
"""
object_name.py: Put the text of the current NVDA navigator object into a new window.
To install, place this file in the globalPlugins folder. You can get to this folder by typing the following into your run box (windows key + r):
%appdata%\nvda\globalPlugins
Once this is done, simply reload NVDA.
To use, focus the object you want to view the text for, then press NVDA+CTRL+N.
This file written by Chris Norman for use in Coventry Samaritans.
Feel free to distribute, use, learn from, edit or whatever. I only ask that if you do use or modify it, you send me an email just so I can feel good about myself! :-)
My email address, should you want to stroke my ego, complain or get help is
chris.norman2@googlemail.com
"""
from globalPluginHandler import GlobalPlugin
from api import getFocusObject
from ui import message
frame = None # The frame when it opens. Gets reset upon close.
class GlobalPlugin(GlobalPlugin):
"""The plugin required to make this whole thing work."""
def script_objectName(self, gesture):
"""Pops up the text of the current object in a new window."""
text = getattr(getFocusObject(), 'windowText', None)
if text != None:
text = text.strip('\n')
if not text:
message('No text.')
else:
global frame
if frame:
frame.setText(text)
else:
frame = ObjectNameFrame(text)
__gestures = {
'kb:NVDA+control+n': 'objectName'
}
import wx
class ObjectNameFrame(wx.Frame):
"""The frame to show."""
def __init__(self, text):
"""Text is the text to populate the frame with."""
super(ObjectNameFrame, self).__init__(None, title = 'Object Text')
p = wx.Panel(self)
s = wx.BoxSizer(wx.HORIZONTAL)
s1 = wx.BoxSizer(wx.VERTICAL)
s1.Add(wx.StaticText(p, label = '&Object Text'), 0, wx.GROW)
self.text = wx.TextCtrl(p, style = wx.TE_MULTILINE)
s1.Add(self.text, 1, wx.GROW)
s.Add(s1, 1, wx.GROW)
s2 = wx.BoxSizer(wx.VERTICAL)
self.closeButton = wx.Button(p, label = 'Close &Window')
s2.Add(self.closeButton, 1, wx.GROW)
self.closeButton.Bind(wx.EVT_BUTTON, lambda event: self.Close(True))
self.findText = '' # The text that the user searched for last time.
self.findButton = wx.Button(p, label = '&Find...')
s2.Add(self.findButton, 1, wx.GROW)
self.findButton.Bind(wx.EVT_BUTTON, self.find)
self.findAgainButton = wx.Button(p, label = 'Find A&gain')
s2.Add(self.findAgainButton, 1, wx.GROW)
self.findAgainButton.Bind(wx.EVT_BUTTON, lambda event: self.find(text = self.findText))
s.Add(s2, 0, wx.GROW)
p.SetSizerAndFit(s)
self.menu = wx.MenuBar()
self.editMenu = wx.Menu()
self.Bind(wx.EVT_MENU, self.find, self.editMenu.Append(wx.ID_FIND, '&Find...\tCTRL+F', 'Search for a string.'))
self.Bind(wx.EVT_MENU, lambda event: self.find(text = self.findText), self.editMenu.Append(wx.ID_ANY, 'Find A&gain\tF3', 'Search again.'))
self.menu.Append(self.editMenu, '&Edit')
self.SetMenuBar(self.menu)
self.Bind(wx.EVT_CLOSE, self.onClose)
self.setText(text)
def Show(self, value = True):
"""Show the window, maximizing in the process."""
res = super(ObjectNameFrame, self).Show(value)
self.Maximize(True)
return res
def find(self, event = None, text = None):
"""Find function."""
if text == None:
dlg = wx.TextEntryDialog(self, 'Enter a string to search for', 'Find', self.findText)
if dlg.ShowModal() == wx.ID_OK:
text = dlg.GetValue()
dlg.Destroy()
if text:
self.findText = text
text = text.lower()
i = self.text.GetInsertionPoint()
f = self.text.GetValue().lower().find(text, i)
if f >= 0:
f += self.text.GetValue().count('\n', 0, f)
self.text.SetSelection(f, f + len(text))
else:
wx.Bell()
return self.find()
def setText(self, text):
"""Set the text field to the provided text."""
self.text.SetValue(text)
self.Raise()
self.Show(True)
def onClose(self, event):
"""Close the window, clearing frame first."""
global frame
frame = None
event.Skip()
| chrisnorman7/object_name | object_name.py | Python | gpl-2.0 | 3,969 |
#encoding=utf-8
import ldap
SERVER_NAME = '192.168.0.9'
SERVER_PORT = 389
try:
conn = ldap.open("127.0.0.1")
    conn.protocol_version = ldap.VERSION3 # set the LDAP protocol version
    username = "cn=Manager,o=example.com" # bind identity (DN)
    password = "secret" # bind password
    conn.simple_bind_s(username,password) # start the bind; on success no exception is raised
except ldap.LDAPError, e: # catch and report error information
print e
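# Illustrative follow-up (editor's addition, not part of the original snippet):
# once the bind succeeds, the same connection can run searches. The base DN
# mirrors the o=example.com suffix used above.
def example_search(conn):
    results = conn.search_s("o=example.com", ldap.SCOPE_SUBTREE,
                            "(objectClass=*)", ["cn"])
    for dn, attrs in results:
        print dn, attrs.get("cn")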
| solvery/lang-features | python/network/ldap_1/ldap_client_1.py | Python | gpl-2.0 | 431 |
# -*- coding: utf-8 -*-
import unittest
import numpy as np
import sys
sys.path.append('..')
from rt1.scatter import Scatter
class TestScatter(unittest.TestCase):
def test_p(self):
S = Scatter()
theta_0 = np.pi/2.
theta_s = 0.234234
phi_0 = np.pi/2.
phi_s = 0.
p = S.scat_angle(theta_0, theta_s, phi_0, phi_s, a=[-1.,1.,1.]) # cos(theta)=0
self.assertAlmostEqual(p, 0.,10)
theta_0 = 0.
theta_s = 0.
phi_0 = np.pi/2.
phi_s = 0.12345
p = S.scat_angle(theta_0, theta_s, phi_0, phi_s, a=[-.7,1.,1.]) # cos(theta)=-1
self.assertAlmostEqual(p, -.7,10)
if __name__ == "__main__":
unittest.main()
| TUW-GEO/rt1 | tests/test_scatter.py | Python | apache-2.0 | 717 |
# coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import argparse
import os
import simplejson as json
import grpc
from google.protobuf.json_format import MessageToJson
from qrl.core import config
from qrl.core.AddressState import AddressState
from qrl.crypto.xmss import XMSS
from qrl.core.txs.Transaction import Transaction
from qrl.core.txs.TransferTransaction import TransferTransaction
from pyqrllib.pyqrllib import hstr2bin, bin2hstr
from qrl.generated import qrl_pb2_grpc, qrl_pb2, qrlmining_pb2, qrlmining_pb2_grpc
from flask import Flask, Response, request
from jsonrpc.backend.flask import api
app = Flask(__name__)
def read_slaves(slaves_filename):
with open(slaves_filename, 'r') as f:
slave_data = json.load(f)
slave_data[0] = bytes(hstr2bin(slave_data[0]))
return slave_data
def get_addr_state(addr: bytes) -> AddressState:
stub = get_public_stub()
response = stub.GetAddressState(request=qrl_pb2.GetAddressStateReq(address=addr))
return AddressState(response.state)
def set_unused_ots_key(xmss, addr_state, start=0):
for i in range(start, 2 ** xmss.height):
if not addr_state.ots_key_reuse(i):
xmss.set_ots_index(i)
return True
return False
def valid_payment_permission(public_stub, master_address_state, payment_xmss, json_slave_txn):
access_type = master_address_state.get_slave_permission(payment_xmss.pk)
if access_type == -1:
tx = Transaction.from_json(json_slave_txn)
public_stub.PushTransaction(request=qrl_pb2.PushTransactionReq(transaction_signed=tx.pbdata))
return None
if access_type == 0:
return True
return False
def get_unused_payment_xmss(public_stub):
global payment_slaves
global payment_xmss
master_address = payment_slaves[0]
master_address_state = get_addr_state(master_address)
if payment_xmss:
addr_state = get_addr_state(payment_xmss.address)
if set_unused_ots_key(payment_xmss, addr_state, payment_xmss.ots_index):
if valid_payment_permission(public_stub, master_address_state, payment_xmss, payment_slaves[2]):
return payment_xmss
else:
payment_xmss = None
if not payment_xmss:
unused_ots_found = False
for slave_seed in payment_slaves[1]:
xmss = XMSS.from_extended_seed(slave_seed)
addr_state = get_addr_state(xmss.address)
if set_unused_ots_key(xmss, addr_state): # Unused ots_key_found
payment_xmss = xmss
unused_ots_found = True
break
if not unused_ots_found: # Unused ots_key_found
return None
if not valid_payment_permission(public_stub, master_address_state, payment_xmss, payment_slaves[2]):
return None
return payment_xmss
@app.route('/api/<api_method_name>')
def api_proxy(api_method_name):
"""
    Proxies JSON-RPC requests to the gRPC server and converts the gRPC response
    back to JSON.
:param api_method_name:
:return:
"""
stub = qrl_pb2_grpc.PublicAPIStub(grpc.insecure_channel('{}:{}'.format(config.user.public_api_host,
config.user.public_api_port)))
public_api = qrl_pb2.DESCRIPTOR.services_by_name['PublicAPI']
api_method = public_api.FindMethodByName(api_method_name)
api_request = getattr(qrl_pb2, api_method.input_type.name)()
for arg in request.args:
if arg not in api_method.input_type.fields_by_name:
            raise Exception('Invalid args %s' % arg)
data_type = type(getattr(api_request, arg))
if data_type == bool and request.args[arg].lower() == 'false':
continue
value = data_type(request.args.get(arg, type=data_type))
setattr(api_request, arg, value)
resp = getattr(stub, api_method_name)(api_request, timeout=10)
return Response(response=MessageToJson(resp, sort_keys=True), status=200, mimetype='application/json')
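# Illustrative client-side sketch (editor's addition, not part of this module):
# the route above exposes each PublicAPI gRPC call as a plain HTTP GET under
# /api/<MethodName>. The host, port and method name below are placeholders,
# not values taken from this project's configuration.
def _example_api_proxy_call():
    import requests  # assumed to be available on the client side
    resp = requests.get('http://127.0.0.1:18090/api/GetNodeState')
    return resp.json()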
def get_mining_stub():
global mining_stub
return mining_stub
def get_public_stub():
global public_stub
return public_stub
@api.dispatcher.add_method
def getlastblockheader(height=0):
stub = get_mining_stub()
request = qrlmining_pb2.GetLastBlockHeaderReq(height=height)
grpc_response = stub.GetLastBlockHeader(request=request, timeout=10)
block_header = {
'difficulty': grpc_response.difficulty,
'height': grpc_response.height,
'timestamp': grpc_response.timestamp,
'reward': grpc_response.reward,
'hash': grpc_response.hash,
'depth': grpc_response.depth
}
resp = {
"block_header": block_header,
"status": "OK"
}
return resp
@api.dispatcher.add_method
def getblockheaderbyheight(height):
return getlastblockheader(height)
@api.dispatcher.add_method
def getblocktemplate(reserve_size, wallet_address):
stub = get_mining_stub()
request = qrlmining_pb2.GetBlockToMineReq(wallet_address=wallet_address.encode())
grpc_response = stub.GetBlockToMine(request=request, timeout=10)
resp = {
'blocktemplate_blob': grpc_response.blocktemplate_blob,
'difficulty': grpc_response.difficulty,
'height': grpc_response.height,
'reserved_offset': grpc_response.reserved_offset,
'seed_hash': grpc_response.seed_hash,
'status': 'OK'
}
return resp
@api.dispatcher.add_method
def submitblock(blob):
stub = get_mining_stub()
request = qrlmining_pb2.SubmitMinedBlockReq(blob=bytes(hstr2bin(blob)))
response = stub.SubmitMinedBlock(request=request, timeout=10)
if response.error:
        raise Exception  # the mining pool expects an exception when block submission fails
return MessageToJson(response, sort_keys=True)
@api.dispatcher.add_method
def getblockminingcompatible(height):
stub = get_mining_stub()
request = qrlmining_pb2.GetBlockMiningCompatibleReq(height=height)
response = stub.GetBlockMiningCompatible(request=request, timeout=10)
return MessageToJson(response, sort_keys=True)
@api.dispatcher.add_method
def transfer(destinations, fee, mixin, unlock_time):
if len(destinations) > config.dev.transaction_multi_output_limit:
raise Exception('Payment Failed: Amount exceeds the allowed limit')
addrs_to = []
amounts = []
for tx in destinations:
addrs_to.append(bytes(hstr2bin(tx['address'][1:]))) # Skipping 'Q'
amounts.append(tx['amount'])
stub = get_public_stub()
xmss = get_unused_payment_xmss(stub)
if not xmss:
raise Exception('Payment Failed: No Unused Payment XMSS found')
tx = TransferTransaction.create(addrs_to=addrs_to,
amounts=amounts,
message_data=None,
fee=fee,
xmss_pk=xmss.pk,
master_addr=payment_slaves[0])
tx.sign(xmss)
response = stub.PushTransaction(request=qrl_pb2.PushTransactionReq(transaction_signed=tx.pbdata))
if response.error_code != 3:
        raise Exception('Transaction Submission Failed, Response Code: %s' % response.error_code)
response = {'tx_hash': bin2hstr(tx.txhash)}
return response
app.add_url_rule('/json_rpc', 'api', api.as_view(), methods=['POST'])
def parse_arguments():
parser = argparse.ArgumentParser(description='QRL node')
parser.add_argument('--qrldir', '-d', dest='qrl_dir', default=config.user.qrl_dir,
help="Use a different directory for node data/configuration")
parser.add_argument('--network-type', dest='network_type', choices=['mainnet', 'testnet'],
default='mainnet', required=False, help="Runs QRL Testnet Node")
return parser.parse_args()
def main():
args = parse_arguments()
qrl_dir_post_fix = ''
copy_files = []
if args.network_type == 'testnet':
qrl_dir_post_fix = '-testnet'
package_directory = os.path.dirname(os.path.abspath(__file__))
copy_files.append(os.path.join(package_directory, 'network/testnet/genesis.yml'))
copy_files.append(os.path.join(package_directory, 'network/testnet/config.yml'))
config.user.qrl_dir = os.path.expanduser(os.path.normpath(args.qrl_dir) + qrl_dir_post_fix)
config.create_path(config.user.qrl_dir, copy_files)
config.user.load_yaml(config.user.config_path)
global payment_slaves, payment_xmss
global mining_stub, public_stub
mining_stub = qrlmining_pb2_grpc.MiningAPIStub(grpc.insecure_channel('{0}:{1}'.format(config.user.mining_api_host,
config.user.mining_api_port)))
public_stub = qrl_pb2_grpc.PublicAPIStub(grpc.insecure_channel('{0}:{1}'.format(config.user.public_api_host,
config.user.public_api_port)))
payment_xmss = None
payment_slaves = read_slaves(config.user.mining_pool_payment_wallet_path)
app.run(host=config.user.grpc_proxy_host, port=config.user.grpc_proxy_port)
if __name__ == '__main__':
main()
| cyyber/QRL | src/qrl/grpcProxy.py | Python | mit | 9,400 |
VERSION = '1.5.2-dev'
| twosigma/waiter | cli/waiter/version.py | Python | apache-2.0 | 22 |
#!/usr/bin/python3
from qgmap import *
if __name__ == '__main__' :
def goCoords() :
def resetError() :
coordsEdit.setStyleSheet('')
try : latitude, longitude = coordsEdit.text().split(",")
except ValueError :
coordsEdit.setStyleSheet("color: red;")
QtCore.QTimer.singleShot(500, resetError)
else :
gmap.centerAt(latitude, longitude)
gmap.moveMarker("MyDragableMark", latitude, longitude)
def goAddress() :
def resetError() :
addressEdit.setStyleSheet('')
coords = gmap.centerAtAddress(addressEdit.text())
if coords is None :
addressEdit.setStyleSheet("color: red;")
QtCore.QTimer.singleShot(500, resetError)
return
gmap.moveMarker("MyDragableMark", *coords)
coordsEdit.setText("{}, {}".format(*coords))
def onMarkerMoved(key, latitude, longitude) :
print("Moved!!", key, latitude, longitude)
coordsEdit.setText("{}, {}".format(latitude, longitude))
def onMarkerRClick(key) :
print("RClick on ", key)
gmap.setMarkerOptions(key, draggable=False)
def onMarkerLClick(key) :
print("LClick on ", key)
def onMarkerDClick(key) :
print("DClick on ", key)
gmap.setMarkerOptions(key, draggable=True)
def onMapMoved(latitude, longitude) :
print("Moved to ", latitude, longitude)
def onMapRClick(latitude, longitude) :
print("RClick on ", latitude, longitude)
def onMapLClick(latitude, longitude) :
print("LClick on ", latitude, longitude)
def onMapDClick(latitude, longitude) :
print("DClick on ", latitude, longitude)
app = QtGui.QApplication([])
w = QtGui.QDialog()
h = QtGui.QVBoxLayout(w)
l = QtGui.QFormLayout()
h.addLayout(l)
addressEdit = QtGui.QLineEdit()
l.addRow('Address:', addressEdit)
addressEdit.editingFinished.connect(goAddress)
coordsEdit = QtGui.QLineEdit()
l.addRow('Coords:', coordsEdit)
coordsEdit.editingFinished.connect(goCoords)
gmap = QGoogleMap(w)
gmap.mapMoved.connect(onMapMoved)
gmap.markerMoved.connect(onMarkerMoved)
gmap.mapClicked.connect(onMapLClick)
gmap.mapDoubleClicked.connect(onMapDClick)
gmap.mapRightClicked.connect(onMapRClick)
gmap.markerClicked.connect(onMarkerLClick)
gmap.markerDoubleClicked.connect(onMarkerDClick)
gmap.markerRightClicked.connect(onMarkerRClick)
h.addWidget(gmap)
gmap.setSizePolicy(
QtGui.QSizePolicy.MinimumExpanding,
QtGui.QSizePolicy.MinimumExpanding)
w.show()
gmap.waitUntilReady()
gmap.centerAt(41.35,2.05)
gmap.setZoom(13)
coords = gmap.centerAtAddress("Pau Casals 3, Santa Coloma de Cervelló")
# Many icons at: https://sites.google.com/site/gmapsdevelopment/
gmap.addMarker("MyDragableMark", *coords, **dict(
icon="http://google.com/mapfiles/ms/micons/blue-dot.png",
draggable=True,
title = "Move me!"
))
# Some Static points
for place in [
"Pau Casals 13, Santa Coloma de Cervelló",
"Ferrer 20, Santa Coloma de Cervelló",
]:
gmap.addMarkerAtAddress(place,
icon="http://google.com/mapfiles/ms/micons/green-dot.png",
)
gmap.setZoom(17)
app.exec_()
| vokimon/python-qgmap | qgmap-example.py | Python | gpl-3.0 | 2,964 |
# coding=utf-8
"""
Copyright 2015 Load Impact
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from datetime import datetime
from loadimpact.clients import Client
from loadimpact.exceptions import CoercionError
from loadimpact.fields import (
Field, DataStoreListField, DateTimeField, DictField, IntegerField,
StringField)
from loadimpact.resources import Resource
from loadimpact.utils import UTC
class MockClient(Client):
def _requests_request(self, method, *args, **kwargs):
pass
class MockResource(Resource):
fields = {}
def __init__(self, client, field_cls, field_value=None):
self.__class__.fields['field'] = field_cls
super(MockResource, self).__init__(client, field=field_value)
class TestFieldsField(unittest.TestCase):
def test_coerce(self):
self.assertRaises(NotImplementedError, Field.coerce, None)
class TestFieldsDataStoreListField(unittest.TestCase):
def setUp(self):
self.client = MockClient()
def test_coerce(self):
coerced = DataStoreListField.coerce([1, 2, 3, 4, 5])
self.assertEqual(coerced, [1, 2, 3, 4, 5])
def test_coerce_from_list_of_dicts(self):
coerced = DataStoreListField.coerce([{'id': 6}, {'id': 7}, {'id': 8}])
self.assertEqual(coerced, [6, 7, 8])
def test_get(self):
r = MockResource(self.client, DataStoreListField, [1, 2, 3, 4, 5])
self.assertEqual(r.field, [1, 2, 3, 4, 5])
class TestFieldsDateTimeField(unittest.TestCase):
def setUp(self):
self.now = datetime.utcnow().replace(tzinfo=UTC(), microsecond=0)
self.client = MockClient()
def test_coerce(self):
value = '%s+00:00' % self.now.strftime(DateTimeField.format)
coerced = DateTimeField.coerce(value)
self.assertEqual(coerced, self.now)
def test_coerce_bad_format(self):
self.assertRaises(CoercionError, DateTimeField.coerce, '2013-01-01')
def test_construct_bad_format(self):
self.assertRaises(CoercionError, MockResource, self.client,
DateTimeField, '2013-01-01')
def test_get(self):
value = '%s+00:00' % self.now.strftime(DateTimeField.format)
r = MockResource(self.client, DateTimeField, value)
self.assertEqual(r.field, self.now)
class TestFieldsIntegerField(unittest.TestCase):
def setUp(self):
self.client = MockClient()
def test_coerce(self):
coerced = IntegerField.coerce(1)
self.assertEqual(coerced, 1)
def test_coerce_from_string(self):
coerced = IntegerField.coerce('1')
self.assertEqual(coerced, 1)
def test_get(self):
r = MockResource(self.client, IntegerField, 1)
self.assertEqual(r.field, 1)
| loadimpact/loadimpact-sdk-python | test/test_fields.py | Python | apache-2.0 | 3,246 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Created on Feb 20, 2013
@author: maribelacosta
'''
class Revision(object):
'''
classdocs
'''
def __init__(self):
self.id = 0 # Fake sequential id. Starts in 0.
self.wikipedia_id = 0 # Wikipedia revision id.
        self.contributor_id = 0 # Id of the contributor who performed the revision.
self.contributor_name = '' # Name of the contributor who performed the revision.
        self.contributor_ip = '' # IP address of the contributor who performed the revision.
self.paragraphs = {} # Dictionary of paragraphs. It is of the form {paragraph_hash : [Paragraph]}.
self.ordered_paragraphs = [] # Ordered paragraph hash.
self.length = 0 # Content length (bytes).
self.total_tokens = 0 # Number of tokens in the revision.
self.timestamp = 0
def __repr__(self):
return str(id(self))
def to_dict(self):
revision = {}
revision.update({'obj' : []})
for paragraph_hash in self.ordered_paragraphs:
p = []
for paragraph in self.paragraphs[paragraph_hash]:
p.append(repr(paragraph))
revision['obj'].append(p)
return revision
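# A minimal usage sketch (the field values below are hypothetical; paragraphs
# are assumed to have been populated elsewhere):
#
#     rev = Revision()
#     rev.wikipedia_id = 12345
#     rev.contributor_name = 'ExampleUser'
#     rev.to_dict()   # -> {'obj': [[<paragraph reprs>, ...], ...]}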
| priyankamandikal/wiki_accuracy_review | structures/Revision.py | Python | mit | 1,304 |
"""A filter that reduces CitcomS vtk input data.
"""
#Author: Martin Weier
#Copyright (C) 2006 California Institute of Technology
#This program is free software; you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation; either version 2 of the License, or
#any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# Enthought library imports.
from enthought.traits import Instance, Range, Int, Float, Enum, Trait, Button
from enthought.traits.ui import View, Group, Item
from enthought.tvtk.api import tvtk
# Local imports
from enthought.mayavi.core.filter import Filter
from CitcomSSphere import *
######################################################################
# `Threshold` class.
######################################################################
class CitcomSreduce(Filter):
# The version of this class. Used for persistence.
__version__ = 0
# The threshold filter.
probe_filter = Instance(tvtk.ProbeFilter, ())
citcomsgrid = CitcomSSphere()
sphere = Instance(tvtk.SphereSource,())
# Upper threshold (this is a dynamic trait that is changed when
# input data changes).
Radius = Range(0.0, 1.0, 0.0, desc='adjust radius')
theta = Range(3, 40, 3, desc='the theta resolution')
    phi = Range(3, 40, 3, desc='the phi resolution')
Selected_Source = Enum( 'Sphere','CitcomSGrid',)
radius_max = Float(1.0)
set_radius_max = Button('Set Radius Max')
# Our view.
view = View(Item(name="Selected_Source"),
Group(Item(name='Radius'),
Item(name='theta'),
Item(name='phi'),
Item(name='radius_max'),
Item(name='set_radius_max', style='simple', label='Simple'),
show_border = True
),
)
grid_source = True
######################################################################
# `Filter` interface.
######################################################################
def setup_pipeline(self):
"""Override this method so that it *creates* its tvtk
pipeline.
This method is invoked when the object is initialized via
`__init__`. Note that at the time this method is called, the
tvtk data pipeline will *not* yet be setup. So upstream data
will not be available. The idea is that you simply create the
basic objects and setup those parts of the pipeline not
dependent on upstream sources and filters.
"""
# Just setup the default output of this filter.
s = self.sphere
s.set(radius=0.0,theta_resolution=20,phi_resolution=20)
self.probe_filter.input = s.output
self.outputs = [self.probe_filter.output]
def update_pipeline(self):
"""Override this method so that it *updates* the tvtk pipeline
when data upstream is known to have changed.
This method is invoked (automatically) when the input fires a
`pipeline_changed` event.
"""
# By default we set the input to the first output of the first
# input.
fil = self.probe_filter
fil.source = self.inputs[0].outputs[0]
self._calc_grid(0,self.theta,self.phi)
fil.update()
self.outputs[0] = fil.output
self.pipeline_changed = True
def update_data(self):
"""Override this method to do what is necessary when upstream
data changes.
This method is invoked (automatically) when any of the inputs
sends a `data_changed` event.
"""
self.probe_filter.source = self.inputs[0].outputs[0]
self.probe_filter.update()
# Propagate the data_changed event.
self.data_changed = True
def _calc_grid(self,radius,resolution_x,resolution_y):
fil = self.probe_filter
coords = []
if self.Selected_Source == 'CitcomSGrid':
for i in xrange(12):
coords += self.citcomsgrid.coords_of_cap(radius,self.theta,self.phi,i)
grid = tvtk.UnstructuredGrid()
#Connectivity for 2d-Data
#There is no need to interpolate with the CitcomS grid surface. If this is however
#wanted uncomment this code to create the CitcomS surface information
#for capnr in xrange(12):
# i=1
# for n in xrange((resolution_x+1)*(resolution_y+1) - (resolution_x+1)):
# if i%(resolution_x+1)!=0 :
# n0 = n+(capnr*((resolution_x+1)*(resolution_y+1)))
# n1 = n0+1
# n2 = n0+resolution_y+1
# n3 = n2+1
# grid.insert_next_cell(8,[n0,n1,n2,n3])
# i+=1
##
grid.points = coords
fil.input = grid
if self.Selected_Source == 'Sphere':
sphere = tvtk.SphereSource()
sphere.radius = radius
sphere.theta_resolution = resolution_x
sphere.phi_resolution = resolution_y
#Rotate the Sphere so that the poles are at the right location
transL = tvtk.Transform()
trans1 = tvtk.TransformPolyDataFilter()
trans2 = tvtk.TransformPolyDataFilter()
trans1.input = sphere.output
transL.rotate_y(90)
transL.update()
trans1.transform = transL
trans1.update()
trans2.input = trans1.output
transL.rotate_z(90)
transL.update()
trans2.transform = transL
trans2.update()
fil.input = trans2.output
fil.update()
######################################################################
# Non-public interface
######################################################################
def _Radius_changed(self, new_value):
fil = self.probe_filter
#self.sphere.radius = new_value
self._calc_grid(new_value,self.theta,self.phi)
fil.update()
self.data_changed = True
def _theta_changed(self, new_value):
fil = self.probe_filter
self._calc_grid(self.Radius,new_value,self.phi)
fil.update()
self.data_changed = True
def _phi_changed(self, new_value):
fil = self.probe_filter
self._calc_grid(self.Radius,self.theta,new_value)
fil.update()
self.data_changed = True
def _Selected_Source_changed(self,new_value):
self._calc_grid(self.Radius, self.theta, self.phi)
self.outputs[0] = self.probe_filter.output
self.data_changed = True
self.pipeline_changed = True
def _radius_max_changed(self,new_value):
if self.Radius > new_value:
self.Radius = new_value
if new_value <= 0.0:
self.radius_max = 0.0
def _set_radius_max_fired(self):
trait = Range(0.0, self.radius_max, self.Radius,
desc='adjust radius')
self.add_trait('Radius', trait)
| geodynamics/citcoms | visual/Mayavi2/original_plugins/plugins/filter/CitcomSreduce.py | Python | gpl-2.0 | 7,907 |
# Copyright (c) 2014. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, division, absolute_import
from os.path import join
from .static_data import MATRIX_DIR
def read_coefficients(
key_type='row',
verbose=True,
filename=join(MATRIX_DIR, 'pmbec.mat')):
"""
Parameters
------------
filename : str
Location of PMBEC coefficient matrix
key_type : str
'row' : every key is a single amino acid,
which maps to a dictionary for that row
'pair' : every key is a tuple of amino acids
'pair_string' : every key is a string of two amino acid characters
verbose : bool
Print rows of matrix as we read them
"""
d = {}
if key_type == 'row':
def add_pair(row_letter, col_letter, value):
if row_letter not in d:
d[row_letter] = {}
d[row_letter][col_letter] = value
elif key_type == 'pair':
def add_pair(row_letter, col_letter, value):
d[(row_letter, col_letter)] = value
else:
assert key_type == 'pair_string', \
"Invalid dictionary key type: %s" % key_type
def add_pair(row_letter, col_letter, value):
d["%s%s" % (row_letter, col_letter)] = value
with open(filename, 'r') as f:
lines = [line for line in f.read().split('\n') if len(line) > 0]
header = lines[0]
if verbose:
print(header)
residues = [x for x in header.split(' ') if len(x) == 1 and x != ' ' and x != '\t']
assert len(residues) == 20
if verbose:
print(residues)
for line in lines[1:]:
cols = [
x
for x in line.split(' ')
if len(x) > 0 and x != ' ' and x != '\t'
]
assert len(cols) == 21, "Expected 20 values + letter, got %s" % cols
row_letter = cols[0]
for i, col in enumerate(cols[1:]):
col_letter = residues[i]
assert col_letter != ' ' and col_letter != '\t'
value = float(col)
add_pair(row_letter, col_letter, value)
return d
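# A small usage sketch; the residue pair below is illustrative, the actual
# coefficient values come from the PMBEC matrix file:
#
#     row_major = read_coefficients(key_type='row', verbose=False)
#     coeff = row_major['A']['C']
#
#     pairs = read_coefficients(key_type='pair', verbose=False)
#     same_coeff = pairs[('A', 'C')]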
if __name__ == '__main__':
d = read_coefficients(key_type='pair_string')
print("PMBEC matrix")
for k in sorted(d):
print(k, d[k])
| dmnfarrell/epitopemap | modules/pepdata/pmbec.py | Python | apache-2.0 | 2,894 |
import bbcode
import markdown
import html
from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
from c2corg_ui.format.img import C2CImageExtension
from c2corg_ui.format.video import C2CVideoExtension
from c2corg_ui.format.important import C2CImportantExtension
from c2corg_ui.format.warning import C2CWarningExtension
from markdown.extensions.nl2br import Nl2BrExtension
from markdown.extensions.toc import TocExtension
_markdown_parser = None
_bbcode_parser = None
_parsers_settings = None
def configure_parsers(settings):
global _parsers_settings
_parsers_settings = {
'api_url': settings.get('api_url')
}
def _get_markdown_parser():
global _markdown_parser
if not _markdown_parser:
extensions = [
C2CWikiLinkExtension(),
C2CImageExtension(api_url=_parsers_settings['api_url']),
C2CVideoExtension(),
C2CImportantExtension(),
C2CWarningExtension(),
Nl2BrExtension(),
TocExtension(marker='[toc]', baselevel=2),
]
_markdown_parser = markdown.Markdown(output_format='xhtml5',
extensions=extensions)
return _markdown_parser
def _get_bbcode_parser():
global _bbcode_parser
if not _bbcode_parser:
_bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
return _bbcode_parser
def parse_code(text, md=True, bb=True):
if bb:
text = _get_bbcode_parser().format(text)
if md:
text = _get_markdown_parser().convert(text)
return text
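# Rough usage sketch (the api_url value is a placeholder and the exact HTML
# emitted depends on the configured extensions):
#
#     configure_parsers({'api_url': 'https://api.example.org'})
#     html_fragment = parse_code('Some *markdown* and [b]bbcode[/b]')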
def sanitize(text):
return html.escape(text)
| Courgetteandratatouille/v6_ui | c2corg_ui/format/__init__.py | Python | agpl-3.0 | 1,630 |
class Solution(object):
def fizzBuzz(self, n):
"""
:type n: int
:rtype: List[str]
"""
num_str_arr = [""] * n
for i in range(1, n+1):
num_str = ""
if i % 3 != 0 and i % 5 != 0:
num_str = str(i)
else:
if i % 3 == 0:
num_str = "Fizz"
if i % 5 == 0:
num_str += "Buzz"
num_str_arr[i-1] = num_str
return num_str_arr
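# Quick usage check of the method above:
#     Solution().fizzBuzz(5)  # -> ["1", "2", "Fizz", "4", "Buzz"]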
| zqfan/leetcode | algorithms/412. Fizz Buzz/solution.py | Python | gpl-3.0 | 506 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
if __name__ != '__main__':
__dicErros__ = {
00: "[Erro]: Arquivo contendo o código fonte não foi passado!",
01: "[Erro]: O arquivo passado possui extensão incompatível.",
02: "[Erro]: Erro lexico na linha "
}
__listaErros__ = []
def getErro(chave, linha, coluna):
"""A função retorna a string do erro correspondente contido no
dicionário de erros."""
if(linha is None and coluna is None):
return __dicErros__[chave]
return __dicErros__[chave] + str(linha) + " e coluna " + str(coluna)
def getListaErros():
"""A função retorna a lista de erros encontrados no código
fonte."""
return __listaErros__
def setListaErros(erro):
"""A função acrescenta ao final da lista de erros uma nova string
de um erro encontrado."""
return __listaErros__.append(erro)
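    # Usage sketch for the helpers above, e.g. a lexical error at line 3,
    # column 7:
    #     getErro(2, 3, 7)  # -> "[Erro]: Erro lexico na linha 3 e coluna 7"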
| Lucasgscruz/AnalisadorLexico | src/erros.py | Python | gpl-3.0 | 952 |
#!/usr/bin/env python3
# coding: utf-8
import logging
import api
# :REFACTOR:
import datetime
def set_timer(seconds, func):
period = datetime.timedelta(seconds=seconds)
def on_timeout():
func()
io_loop.add_timeout(period, on_timeout)
io_loop.add_timeout(period, on_timeout)
if __name__ == "__main__":
from api import connect_to_dvr, StreamState
from tornado import gen
from tornado.ioloop import IOLoop
io_loop = IOLoop.instance()
host, port = "localhost", 8000
if False:
@gen.engine
def process(callback=None):
stream = yield gen.Task(connect, host, port)
if stream:
stream.close()
if callback:
callback(None)
io_loop.stop()
process()
io_loop.start()
if True:
test_timer = False
if test_timer:
def on_test_timer():
print("!")
set_timer(1, on_test_timer)
else:
def write_func(stream, is_first):
stream.write(b"Hello\n")
print("written!")
obj = api.make_struct()
def on_timeout():
connect_to_dvr(obj, (host, port), write_func)
set_timer(1, on_timeout)
io_loop.start()
| BradburyLab/show_tv | show_tv/test_connection.py | Python | gpl-3.0 | 1,417 |
#!/usr/bin/env python
#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of REDHAWK server.
#
# REDHAWK server is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# REDHAWK server is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
from tornado.testing import main
import unittest
from tests import *
def all():
return unittest.TestLoader().loadTestsFromModule(__import__(__name__))
if __name__ == '__main__':
main()
| RedhawkSDR/rtl-demo-app | server/test.py | Python | lgpl-3.0 | 1,041 |
#!/usr/bin/env python
import matplotlib.pyplot as plt
import numpy as np
# Number of "papers using libmesh" by year.
#
# Note 1: this does not count citations "only," the authors must have actually
# used libmesh in part of their work. Therefore, these counts do not include
# things like Wolfgang citing us in his papers to show how Deal.II is
# superior...
#
# Note 2: I typically update this data after regenerating the web page,
# since bibtex2html renumbers the references starting from "1" each year.
#
# Note 3: These citations include anything that is not a dissertation/thesis.
# So, some are conference papers, some are journal articles, etc.
#
# Note 4: The libmesh paper came out in 2006, but there are some citations
# prior to that date, obviously. These counts include citations of the
# website libmesh.sf.net as well...
#
# Note 5: Preprints are listed as the "current year + 1" and are constantly
# being moved to their respective years after being published.
data = [
'\'04', 5,
'\'05', 2,
'\'06', 13,
'\'07', 8,
'\'08', 23,
'\'09', 29,
'\'10', 25,
'\'11', 32,
'\'12', 53,
'\'13', 78,
'\'14', 63,
'\'15', 79,
'\'16', 84,
'\'17', 31,
'T', 62
]
# Extract the x-axis labels from the data array
xlabels = data[0::2]
# Extract the publication counts from the data array
n_papers = data[1::2]
# The number of data points
N = len(xlabels)
# Get a reference to the figure
fig = plt.figure()
# 111 is equivalent to Matlab's subplot(1,1,1) command
ax = fig.add_subplot(111)
# Create an x-axis for plotting
x = np.linspace(1, N, N)
# Width of the bars
width = 0.8
# Make the bar chart. Plot years in blue, preprints and theses in green.
ax.bar(x[0:N-1], n_papers[0:N-1], width, color='b')
ax.bar(x[N-1:N], n_papers[N-1:N], width, color='g')
# Label the x-axis
plt.xlabel('T=PhD, MS, and BS Theses')
# Set up the xtick locations and labels. Note that you have to offset
# the position of the ticks by width/2, where width is the width of
# the bars.
ax.set_xticks(np.linspace(1,N,N) + width/2)
ax.set_xticklabels(xlabels)
# Create a title string
title_string = 'Papers by People Using LibMesh, (' + str(sum(n_papers)) + ' Total)'
fig.suptitle(title_string)
# Save as PDF
plt.savefig('libmesh_citations.pdf')
# Local Variables:
# python-indent: 2
# End:
| vikramvgarg/libmesh | doc/statistics/libmesh_citations.py | Python | lgpl-2.1 | 2,350 |
# core.py
#
# by Shaun Astbury
#
# Import required modules.
try:
from osgeo import ogr, osr
except:
import ogr
import osr
import os
ogr.UseExceptions()
# Driver names matched to driver instances.
drivers = {ogr.GetDriver(i).GetName(): ogr.GetDriver(i) for i in
range(ogr.GetDriverCount())}
# OGR Geometry methods for exporting geometries to other formats.
export_geometries = {'wkt': ogr.Geometry.ExportToWkt,
'wkb': ogr.Geometry.ExportToWkb,
'kml': ogr.Geometry.ExportToKML,
'json': ogr.Geometry.ExportToJson,
'gml': ogr.Geometry.ExportToGML}
# OSR SpatialReference methods for exporting to other formats.
export_sr = {'pwkt': osr.SpatialReference.ExportToPrettyWkt,
'wkt': osr.SpatialReference.ExportToWkt,
'proj4': osr.SpatialReference.ExportToProj4,
'pci': osr.SpatialReference.ExportToPCI,
'xml': osr.SpatialReference.ExportToXML,
'epsg': osr.SpatialReference.GetAttrValue}
# Matches extensions to drivers.
extensions = {}
for driver in drivers:
driver = drivers[driver]
data = driver.GetMetadata()
if "DMD_EXTENSIONS" in data:
exts = data["DMD_EXTENSIONS"]
exts = exts.split(" ")
for ext in exts:
extensions["." + ext] = driver
# Match single geometries to multi versions and vice versa.
geom_dict = {'POINT': 'MULTIPOINT',
'LINESTRING': 'MULTILINESTRING',
'POLYGON': 'MULTIPOLYGON',
'MULTIPOINT': 'POINT',
'MULTILINESTRING': 'LINESTRING',
'MULTIPOLYGON': 'POLYGON'}
# OGR geometry type codes to names.
geom_types = {ogr.wkbUnknown: 'UNKNOWN',
ogr.wkbPoint: 'POINT',
ogr.wkbLineString: 'LINESTRING',
ogr.wkbPolygon: 'POLYGON',
ogr.wkbMultiPoint: 'MULTIPOINT',
ogr.wkbMultiLineString: 'MULTILINESTRING',
ogr.wkbMultiPolygon: 'MULTIPOLYGON',
ogr.wkbGeometryCollection: 'GEOMETRYCOLLECTION',
ogr.wkbNone: 'NONE',
ogr.wkbLinearRing: 'LINEARRING'}
# OGR geometry creation functions.
import_geometries = {'wkt': ogr.CreateGeometryFromWkt,
'wkb': ogr.CreateGeometryFromWkb,
'json': ogr.CreateGeometryFromJson,
'gml': ogr.CreateGeometryFromGML,
'ogr': ogr.Geometry.Clone}
# OSR SpatialReference methods for importing from other formats.
import_sr = {'wkt': osr.SpatialReference.ImportFromWkt,
'proj4': osr.SpatialReference.ImportFromProj4,
'url': osr.SpatialReference.ImportFromUrl,
'esri': osr.SpatialReference.ImportFromESRI,
'epsg': osr.SpatialReference.ImportFromEPSG,
'epsga': osr.SpatialReference.ImportFromEPSGA,
'pci': osr.SpatialReference.ImportFromPCI,
'usgs': osr.SpatialReference.ImportFromUSGS,
'xml': osr.SpatialReference.ImportFromXML,
'erm': osr.SpatialReference.ImportFromERM}
# OGR Geometry spatial predicate methods.
spatial_queries = {'CONTAINS': ogr.Geometry.Contains,
'CROSSES': ogr.Geometry.Crosses,
'DISJOINT': ogr.Geometry.Disjoint,
'EQUALS': ogr.Geometry.Equals,
'INTERSECTS': ogr.Geometry.Intersects,
'OVERLAPS': ogr.Geometry.Overlaps,
'TOUCHES': ogr.Geometry.Touches,
'WITHIN': ogr.Geometry.Within}
def add_attribute(iterable, value=None):
"""
Add an attribute to Features in a generator.
Parameters:
- iterable:
The features to iterate over (list/tuple/generator).
- value (optional):
Default value for attribute.
Yields:
- feature:
            The feature with the new attribute appended (Feature).
"""
for feature in iterable:
feature.attributes.append(value)
yield feature
def cascaded_union(geoms):
"""
Union multiple OGR Geometries into a single Geometry.
Parameters:
- geoms:
The OGR Geometries to iterate over (list/tuple/generator).
Returns:
- geom:
The resulting geometry (ogr.Geometry).
"""
geometry = ogr.Geometry(ogr.wkbMultiPolygon)
for geom in geoms:
geometry.AddGeometry(geom)
geom = geometry.UnionCascaded()
del geometry
return geom
def create_layer(datasource, field_definitions, geometry_type, fields=None,
features=None, spatial_ref=None, layer=None, driver=None):
"""
Create a data source and layer file, and populate with Features.
Parameters:
- datasource:
Path to the output data source, potentially including the layer
name (str).
- field_definitions:
Feature definitions object or Layer field_definitions dict
(ogr.FeatureDefn/dict).
- geometry_type:
OGR geometry type for the output (int).
- fields (optional):
Field names for the output (list/tuple).
- features (optional):
The features to iterate over (list/tuple/generator).
- spatial_ref (optional):
OSR SpatialReference to set for the output (osr.SpatialReference).
- layer (optional):
Name for the output layer, if not given in datasource (str).
- driver (optional):
OGR name for the driver to use for the output (str).
Returns:
- ds:
The opened data source (ogr.DataSource).
- out_layer:
The created layer (ogr.Layer).
"""
_, ds = open_ds(datasource, driver, True, "rw")
layers = ds.GetLayerCount()
if layer is None:
layer = get_layer(datasource)
if layer.upper() in (ds.GetLayerByIndex(i).GetName().upper()
for i in xrange(layers)):
ds.DeleteLayer(layer)
out_layer = ds.CreateLayer(layer, spatial_ref, geometry_type)
if isinstance(field_definitions, ogr.FeatureDefn):
defn = field_definitions
else:
for field in field_definitions:
field_type, precision, width = field_definitions[field]
field_def = ogr.FieldDefn(field, field_type)
if precision:
field_def.SetPrecision(precision)
if width:
field_def.SetWidth(width)
out_layer.CreateField(field_def)
defn = out_layer.GetLayerDefn()
if features:
for feature in features:
if spatial_ref:
feature = feature.transform(spatial_ref, in_place=False)
feat = create_ogr_feature(defn, feature.ogr_geom,
feature.attributes, fields)
out_layer.CreateFeature(feat)
return ds, out_layer
def create_ogr_feature(definition, ogr_geom, attributes=[], fields=None):
"""
Create an OGR Feature object from a OGR Geometry, OGR FeatureDefn, and a
set of attributes.
Parameters:
- field_definitions:
Feature definitions object or Layer field_definitions dict
(ogr.FeatureDefn/dict).
- ogr_geom:
OGR geometry for the output (ogr.Geometry).
- attributes (optional):
The attributes to include in the output feature (list/tuple).
- fields (optional):
Field names for the output (list/tuple).
Returns:
- feature:
The created Feature object (ogr.Feature).
"""
feature = ogr.Feature(definition)
if fields is None:
fields = [definition.GetFieldDefn(i).GetName() for
i in xrange(definition.GetFieldCount())]
feature.SetGeometry(ogr_geom)
for field, attribute in zip(fields, attributes):
feature.SetField(field, attribute)
return feature
def extent_to_polygon(minx, miny, maxx, maxy):
"""
Create an OGR Geometry from a bounding box extent.
Parameters:
- minx:
Minimum x value for extent (int/float).
- miny:
Minimum y value for extent (int/float).
- maxx:
Maximum x value for extent (int/float).
- maxy:
Maxmum y value for extent (int/float).
Returns:
- ogr_geom:
The resultant OGR Geometry (ogr.Geometry)
"""
extent = "POLYGON (("
extent += "{0} {1}, {2} {1}, {2} {3}, {0} {3}, {0} {1}".format(
minx, miny, maxx, maxy)
extent += "))"
return import_geometries['wkt'](extent)
def get_layer(datasource):
"""
Gets the layer name of single-layer data sources. If not possible (e.g.
database connection strings), raises an exception.
Parameters:
- datasource:
File path for the datasource, or an OGR DataSource instance
(ogr.DataSource/str).
Returns:
- layer_name:
The name of the layer, determined as the basename of the input
data source, excluding any file extension (str).
"""
try:
if isinstance(datasource, ogr.DataSource):
datasource = datasource.GetName()
layer_name = os.path.basename(os.path.splitext(datasource)[0])
return layer_name
except:
print "\nNo layer parameter supplied when required by data source."
raise Exception()
def map_geom(func, iterable, *args, **kwargs):
"""
Apply spatial operations to Features in an iterable.
Parameters:
- func:
The function to use (func).
- iterable:
The features to iterate over (list/tuple/generator).
Yields:
- feature:
The result of the spatial operation (Feature).
"""
for i in iterable:
feature = func(i, *args, **kwargs)
if feature is not None:
yield feature
def open_ds(datasource, driver=None, create=False, mode="r"):
"""
Opens OGR DataSources.
Parameters:
- datasource:
File system path to an OGR-readable data source, or a database
connection string (str).
- driver (optional):
Name of the driver to use, in OGR format (str). If not
supplied, this will be determined from the file extension, or
by attempting all drivers (str).
- create (optional):
If True, create a new data source, otherwise if not found, raise an
exception (default) (bool).
- mode (optional):
Set "r" for read only, or "rw" for read/write. OGR 0/1 is also
accepted. Read only is default (int/str).
Returns:
- driver:
The driver used to open the DataSource (ogr.Driver).
- ds:
The opened OGR DataSource (ogr.DataSource).
"""
modes = {"r": 0, "rw": 1}
if mode in modes:
mode = modes[mode]
    elif mode not in modes.values():
print "\nSupplied mode parameter value not valid."
raise Exception()
ext = os.path.splitext(datasource)[1]
if driver is None:
if ext in extensions:
driver = extensions[ext]
elif create:
print "\nNo driver parameter supplied to create data source."
raise Exception()
elif not isinstance(driver, ogr.Driver):
try:
driver = ogr.GetDriverByName(driver)
except:
print ("\nSupplied driver parameter value not valid, or driver " +
"not available.")
raise Exception()
if os.path.exists(datasource):
try:
if driver is None:
ds = ogr.Open(datasource, mode)
driver = ds.GetDriver()
else:
ds = driver.Open(datasource, mode)
except:
print ("\nFailed to open data source, file " +
"format not supported.")
raise Exception()
else:
if create:
try:
if not os.path.exists(os.path.dirname(datasource)):
os.makedirs(os.path.dirname(datasource))
ds = driver.CreateDataSource(datasource)
except:
print "\nCould not create Data Source {0}.".format(datasource)
raise Exception()
else:
print "\nData Source {0} does not exist.".format(datasource)
raise Exception()
return driver, ds
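# A minimal usage sketch (the shapefile path is a placeholder; 'ESRI Shapefile'
# is a standard OGR driver name):
#
#     drv, ds = open_ds('data/places.shp', driver='ESRI Shapefile', mode='r')
#     layer = ds.GetLayerByIndex(0)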
def spatial_query(query, iterable, feature):
"""
Filter Features in an iterable by spatial predicate.
Parameters:
- query:
The spatial predicate function to use (func).
- iterable:
The features to iterate over (list/tuple/generator).
- feature:
The operation feature to apply (Feature)
Yields:
- feature:
The result of the spatial operation (Feature).
"""
for i in iterable:
result = query(i, feature)
if result:
yield i
def update_attributes(iterable, field, value, fields, clause=None):
"""
    Alter the value of a Feature attribute in an iterable.
Parameters:
- iterable:
The features to iterate over (list/tuple/generator).
- field:
            The field to adjust (str).
        - value:
            The new value to assign to the field.
- fields:
The fields present in the clause (list/tuple).
- clause:
The input query string (str).
Yields:
- feature:
            The feature with the updated attribute value (Feature).
"""
idx = fields.index(field)
if clause:
query = Query(fields, clause)
for feature in iterable:
if query.test(feature.attributes):
feature.attributes[idx] = value
yield feature
else:
for feature in iterable:
feature.attributes[idx] = value
yield feature
def update_feature(func, iterable, *args):
"""
Apply spatial operations to Features in an iterable.
Parameters:
- func:
The function to use (func).
- iterable:
The features to iterate over (list/tuple/generator).
Yields:
- feature:
The result of the spatial operation (Feature).
"""
for feature in iterable:
func(feature, *args)
yield feature
class Query(object):
"""
Basic query evaluator. Will test any input to the query - no validation or
parsing, just replaces fields with test values and calls eval.
Methods:
- test:
Tests the attributes of a records against the acceptable input
clause.
Attributes:
- clause:
The input query string (str).
- fields:
The fields present in the clause (list/tuple).
"""
def __init__(self, fields, clause):
"""
Basic query evaluator. Will test any input to the query - no validation
or parsing, just replaces fields with test values and calls eval.
Parameters:
- fields:
Names of all fields present in the tested records, which should
match the clause (list/tuple).
- clause:
Query string to apply to the inputs. Input must be valid
Python, using unquoted field names and field values as they
would be defined in Python, e.g. field_1 >= 1 or field_2 == "A"
(str).
"""
self.fields = fields
self.clause = clause
def test(self, record):
"""
Test a record against the clause, extracting its values based on field
indicies.
Parameters:
- record:
The values to test. Attributes must be set in the order of the
instance fields attribute (list/tuple).
Returns:
- result:
The result of the tested record (bool).
"""
test = self.clause[:]
for field in self.fields:
test_value = record[self.fields.index(field)]
if type(test_value) in (str, unicode):
                test = test.replace(field + ' ', '"' + test_value + '" ')
else:
test = test.replace(field + ' ', str(test_value) + ' ')
test = test.replace(" = ", " == ")
return eval(test)
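# A short usage sketch of Query; the field names and values below are made up
# and rely on the eval-based substitution performed in Query.test:
#
#     q = Query(['population', 'name'], 'population >= 1000 or name == "Berlin"')
#     q.test([500, 'Berlin'])    # -> True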
| ssast/easyogr | core.py | Python | gpl-3.0 | 17,159 |
# Ensure that tests are importing the local copy of trex rather than
# any system-installed copy of trex that might exist in the path.
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir))
import trex
sys.path.pop(0)
| anthonyalmarza/trex | tests/__init__.py | Python | mit | 259 |
"""
After running the recipe in this directory, we get a large JSON file called all_covid19_datasets.json.
This script reads that JSON file, adds to it using the directives pull from a Google sheet, and
produces a new JSON file called demo_data.json.
"""
import csv
import dataclasses
import json
import os
import pathlib
import time
from typing import Dict, List
from datahub.metadata.schema_classes import (
AuditStampClass,
CorpUserInfoClass,
CorpUserSnapshotClass,
DatasetLineageTypeClass,
DatasetSnapshotClass,
EditableSchemaMetadataClass,
GlobalTagsClass,
MetadataChangeEventClass,
OwnerClass,
OwnershipClass,
OwnershipTypeClass,
UpstreamClass,
UpstreamLineageClass,
)
DEMO_DATA_DIR = pathlib.Path("./examples/demo_data")
INPUT_ALL_DATASETS = DEMO_DATA_DIR / "all_covid19_datasets.json"
OUTPUT_ENRICHED = DEMO_DATA_DIR / "demo_data.json"
DIRECTIVES_CSV = DEMO_DATA_DIR / "directives.csv"
@dataclasses.dataclass
class Directive:
table: str
drop: bool
owners: List[str]
depends_on: List[str]
def read_mces(path: os.PathLike) -> List[MetadataChangeEventClass]:
with open(path) as f:
objs = json.load(f)
mces = [MetadataChangeEventClass.from_obj(obj) for obj in objs]
return mces
def write_mces(path: os.PathLike, mces: List[MetadataChangeEventClass]) -> None:
objs = [mce.to_obj() for mce in mces]
with open(path, "w") as f:
json.dump(objs, f, indent=4)
def parse_directive(row: Dict) -> Directive:
return Directive(
table=row["table"],
drop=bool(row["drop"]),
owners=[x.strip() for x in row["owners"].split(",") if x],
depends_on=[x.strip() for x in row["depends_on"].split(",") if x],
)
def fetch_directives() -> List[Directive]:
with open(DIRECTIVES_CSV, "r") as f:
reader = csv.DictReader(f)
rows = [parse_directive(row) for row in reader]
return rows
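# For reference, directives.csv is expected to provide the columns consumed by
# parse_directive(); an illustrative row (table and owner names are made up):
#
#   table,drop,owners,depends_on
#   covid_tracking.summary,,"Alice Smith,Bob Jones",covid_tracking.raw_data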
def dataset_name_to_urn(name: str) -> str:
return f"urn:li:dataset:(urn:li:dataPlatform:bigquery,{name},PROD)"
def clean_owner_name(name: str) -> str:
clean = "".join(c for c in name if c.isalpha())
return clean
def owner_name_to_urn(name: str) -> str:
return f"urn:li:corpuser:{name}"
def create_owner_entity_mce(owner: str) -> MetadataChangeEventClass:
clean_name = clean_owner_name(owner)
return MetadataChangeEventClass(
proposedSnapshot=CorpUserSnapshotClass(
urn=owner_name_to_urn(clean_name),
aspects=[
CorpUserInfoClass(
active=True,
displayName=owner,
fullName=owner,
email=f"{clean_name}-demo@example.com",
)
],
)
)
def create_ownership_aspect_mce(directive: Directive) -> MetadataChangeEventClass:
return MetadataChangeEventClass(
proposedSnapshot=DatasetSnapshotClass(
urn=dataset_name_to_urn(directive.table),
aspects=[
OwnershipClass(
owners=[
OwnerClass(
owner=owner_name_to_urn(clean_owner_name(owner)),
type=OwnershipTypeClass.DATAOWNER,
)
for owner in directive.owners
],
lastModified=AuditStampClass(
time=int(time.time() * 1000),
actor="urn:li:corpuser:datahub",
),
)
],
)
)
def create_lineage_aspect_mce(directive: Directive) -> MetadataChangeEventClass:
return MetadataChangeEventClass(
proposedSnapshot=DatasetSnapshotClass(
urn=dataset_name_to_urn(directive.table),
aspects=[
UpstreamLineageClass(
upstreams=[
UpstreamClass(
dataset=dataset_name_to_urn(upstream),
type=DatasetLineageTypeClass.TRANSFORMED,
auditStamp=AuditStampClass(
time=int(time.time() * 1000),
actor="urn:li:corpuser:datahub",
),
)
for upstream in directive.depends_on
]
)
],
)
)
def create_global_tags_aspect_mce(directive: Directive) -> MetadataChangeEventClass:
return MetadataChangeEventClass(
proposedSnapshot=DatasetSnapshotClass(
urn=dataset_name_to_urn(directive.table),
aspects=[GlobalTagsClass(tags=[])],
)
)
def create_editable_schema_info_aspect_mce(
directive: Directive,
) -> MetadataChangeEventClass:
return MetadataChangeEventClass(
proposedSnapshot=DatasetSnapshotClass(
urn=dataset_name_to_urn(directive.table),
aspects=[
EditableSchemaMetadataClass(
created=AuditStampClass(
time=int(time.time() * 1000),
actor="urn:li:corpuser:datahub",
),
lastModified=AuditStampClass(
time=int(time.time() * 1000),
actor="urn:li:corpuser:datahub",
),
editableSchemaFieldInfo=[],
)
],
)
)
if __name__ == "__main__":
datasets = read_mces(INPUT_ALL_DATASETS)
all_directives = fetch_directives()
directives = [directive for directive in all_directives if not directive.drop]
all_dataset_urns = {
dataset_name_to_urn(directive.table) for directive in all_directives
}
allowed_urns = {
dataset_name_to_urn(directive.table)
for directive in all_directives
if not directive.drop
}
missing_dataset_directives = [
dataset.proposedSnapshot.urn
for dataset in datasets
if dataset.proposedSnapshot.urn not in all_dataset_urns
]
assert not missing_dataset_directives
filtered_dataset_mces = [
dataset for dataset in datasets if dataset.proposedSnapshot.urn in allowed_urns
]
owner_names = {owner for directive in directives for owner in directive.owners}
owner_entity_mces = [
create_owner_entity_mce(owner) for owner in sorted(owner_names)
]
ownership_aspect_mces = [
create_ownership_aspect_mce(directive)
for directive in directives
if directive.owners
]
lineage_aspect_mces = [
create_lineage_aspect_mce(directive)
for directive in directives
if directive.depends_on
]
global_tags_aspect_mces = [
create_global_tags_aspect_mce(directive)
for directive in directives
if not directive.drop
]
editable_schema_info_aspect_mces = [
create_editable_schema_info_aspect_mce(directive)
for directive in directives
if not directive.drop
]
enriched_mces = (
filtered_dataset_mces
+ owner_entity_mces
+ ownership_aspect_mces
+ lineage_aspect_mces
+ global_tags_aspect_mces
+ editable_schema_info_aspect_mces
)
write_mces(OUTPUT_ENRICHED, enriched_mces)
| linkedin/WhereHows | metadata-ingestion/examples/demo_data/enrich.py | Python | apache-2.0 | 7,385 |
# Copyright 2002-2005 Vladimir Prus.
# Copyright 2002-2003 Dave Abrahams.
# Copyright 2006 Rene Rivera.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import TestCmd
import copy
import fnmatch
import glob
import math
import os
import os.path
import re
import shutil
import StringIO
import subprocess
import sys
import tempfile
import time
import traceback
import tree
import types
from xml.sax.saxutils import escape
class TestEnvironmentError(Exception):
pass
annotations = []
def print_annotation(name, value, xml):
"""Writes some named bits of information about the current test run."""
if xml:
print escape(name) + " {{{"
print escape(value)
print "}}}"
else:
print name + " {{{"
print value
print "}}}"
def flush_annotations(xml=0):
global annotations
for ann in annotations:
print_annotation(ann[0], ann[1], xml)
annotations = []
def clear_annotations():
global annotations
annotations = []
defer_annotations = 0
def set_defer_annotations(n):
global defer_annotations
defer_annotations = n
def annotate_stack_trace(tb=None):
if tb:
trace = TestCmd.caller(traceback.extract_tb(tb), 0)
else:
trace = TestCmd.caller(traceback.extract_stack(), 1)
annotation("stacktrace", trace)
def annotation(name, value):
"""Records an annotation about the test run."""
annotations.append((name, value))
if not defer_annotations:
flush_annotations()
def get_toolset():
toolset = None
for arg in sys.argv[1:]:
if not arg.startswith("-"):
toolset = arg
return toolset or "gcc"
# Detect the host OS.
cygwin = hasattr(os, "uname") and os.uname()[0].lower().startswith("cygwin")
windows = cygwin or os.environ.get("OS", "").lower().startswith("windows")
if cygwin:
default_os = "cygwin"
elif windows:
default_os = "windows"
elif hasattr(os, "uname"):
default_os = os.uname()[0].lower()
def prepare_prefixes_and_suffixes(toolset, target_os=default_os):
prepare_suffix_map(toolset, target_os)
prepare_library_prefix(toolset, target_os)
def prepare_suffix_map(toolset, target_os=default_os):
"""
Set up suffix translation performed by the Boost Build testing framework
to accommodate different toolsets generating targets of the same type using
different filename extensions (suffixes).
"""
global suffixes
suffixes = {}
if target_os in ["windows", "cygwin"]:
if toolset == "gcc":
suffixes[".lib"] = ".a" # mingw static libs use suffix ".a".
suffixes[".obj"] = ".o"
if target_os == "cygwin":
suffixes[".implib"] = ".lib.a"
else:
suffixes[".implib"] = ".lib"
else:
suffixes[".exe"] = ""
suffixes[".dll"] = ".so"
suffixes[".lib"] = ".a"
suffixes[".obj"] = ".o"
suffixes[".implib"] = ".no_implib_files_on_this_platform"
if target_os == "darwin":
suffixes[".dll"] = ".dylib"
def prepare_library_prefix(toolset, target_os=default_os):
"""
Setup whether Boost Build is expected to automatically prepend prefixes
to its built library targets.
"""
global lib_prefix
lib_prefix = "lib"
global dll_prefix
if target_os == "cygwin":
dll_prefix = "cyg"
elif target_os == "windows" and toolset != "gcc":
dll_prefix = None
else:
dll_prefix = "lib"
def re_remove(sequence, regex):
me = re.compile(regex)
result = filter(lambda x: me.match(x), sequence)
if not result:
raise ValueError()
for r in result:
sequence.remove(r)
def glob_remove(sequence, pattern):
result = fnmatch.filter(sequence, pattern)
if not result:
raise ValueError()
for r in result:
sequence.remove(r)
class Tester(TestCmd.TestCmd):
"""Main tester class for Boost Build.
Optional arguments:
`arguments` - Arguments passed to the run executable.
`executable` - Name of the executable to invoke.
      `match`                       - Function to use for comparing actual and
expected file contents.
`boost_build_path` - Boost build path to be passed to the run
executable.
      `translate_suffixes`          - Whether to update suffixes on the file
names passed from the test script so they
match those actually created by the current
toolset. For example, static library files
are specified by using the .lib suffix but
when the "gcc" toolset is used it actually
creates them using the .a suffix.
`pass_toolset` - Whether the test system should pass the
specified toolset to the run executable.
`use_test_config` - Whether the test system should tell the run
executable to read in the test_config.jam
configuration file.
`ignore_toolset_requirements` - Whether the test system should tell the run
executable to ignore toolset requirements.
`workdir` - Absolute directory where the test will be
run from.
`pass_d0` - If set, when tests are not explicitly run
in verbose mode, they are run as silent
(-d0 & --quiet Boost Jam options).
Optional arguments inherited from the base class:
`description` - Test description string displayed in case
of a failed test.
`subdir` - List of subdirectories to automatically
create under the working directory. Each
subdirectory needs to be specified
separately, parent coming before its child.
`verbose` - Flag that may be used to enable more
verbose test system output. Note that it
does not also enable more verbose build
system output like the --verbose command
line option does.
"""
def __init__(self, arguments=None, executable="bjam",
match=TestCmd.match_exact, boost_build_path=None,
translate_suffixes=True, pass_toolset=True, use_test_config=True,
ignore_toolset_requirements=False, workdir="", pass_d0=False,
**keywords):
assert arguments.__class__ is not str
self.original_workdir = os.path.dirname(__file__)
if workdir and not os.path.isabs(workdir):
raise ("Parameter workdir <%s> must point to an absolute "
"directory: " % workdir)
self.last_build_timestamp = 0
self.translate_suffixes = translate_suffixes
self.use_test_config = use_test_config
self.toolset = get_toolset()
self.pass_toolset = pass_toolset
self.ignore_toolset_requirements = ignore_toolset_requirements
prepare_prefixes_and_suffixes(pass_toolset and self.toolset or "gcc")
use_default_bjam = "--default-bjam" in sys.argv
if not use_default_bjam:
jam_build_dir = ""
if os.name == "nt":
jam_build_dir = "bin.ntx86"
elif (os.name == "posix") and os.__dict__.has_key("uname"):
if os.uname()[0].lower().startswith("cygwin"):
jam_build_dir = "bin.cygwinx86"
if ("TMP" in os.environ and
os.environ["TMP"].find("~") != -1):
print("Setting $TMP to /tmp to get around problem "
"with short path names")
os.environ["TMP"] = "/tmp"
elif os.uname()[0] == "Linux":
cpu = os.uname()[4]
if re.match("i.86", cpu):
jam_build_dir = "bin.linuxx86"
else:
jam_build_dir = "bin.linux" + os.uname()[4]
elif os.uname()[0] == "SunOS":
jam_build_dir = "bin.solaris"
elif os.uname()[0] == "Darwin":
if os.uname()[4] == "i386":
jam_build_dir = "bin.macosxx86"
elif os.uname()[4] == "x86_64":
jam_build_dir = "bin.macosxx86_64"
else:
jam_build_dir = "bin.macosxppc"
elif os.uname()[0] == "AIX":
jam_build_dir = "bin.aix"
elif os.uname()[0] == "IRIX64":
jam_build_dir = "bin.irix"
elif os.uname()[0] == "FreeBSD":
jam_build_dir = "bin.freebsd"
elif os.uname()[0] == "OSF1":
jam_build_dir = "bin.osf"
else:
raise ("Do not know directory where Jam is built for this "
"system: %s/%s" % (os.name, os.uname()[0]))
else:
raise ("Do not know directory where Jam is built for this "
"system: %s" % os.name)
# Find where jam_src is located. Try for the debug version if it is
# lying around.
srcdir = os.path.join(os.path.dirname(__file__), "..", "src")
dirs = [os.path.join(srcdir, "engine", jam_build_dir + ".debug"),
os.path.join(srcdir, "engine", jam_build_dir)]
for d in dirs:
if os.path.exists(d):
jam_build_dir = d
break
else:
print("Cannot find built Boost.Jam")
sys.exit(1)
verbosity = ["-d0", "--quiet"]
if not pass_d0:
verbosity = []
if "--verbose" in sys.argv:
keywords["verbose"] = True
verbosity = ["-d2"]
self.verbosity = verbosity
if boost_build_path is None:
boost_build_path = self.original_workdir + "/.."
program_list = []
if use_default_bjam:
program_list.append(executable)
else:
program_list.append(os.path.join(jam_build_dir, executable))
program_list.append('-sBOOST_BUILD_PATH="' + boost_build_path + '"')
if arguments:
program_list += arguments
TestCmd.TestCmd.__init__(self, program=program_list, match=match,
workdir=workdir, inpath=use_default_bjam, **keywords)
os.chdir(self.workdir)
def cleanup(self):
try:
TestCmd.TestCmd.cleanup(self)
os.chdir(self.original_workdir)
except AttributeError:
# When this is called during TestCmd.TestCmd.__del__ we can have
# both 'TestCmd' and 'os' unavailable in our scope. Do nothing in
# this case.
pass
def set_toolset(self, toolset, target_os=default_os):
self.toolset = toolset
self.pass_toolset = True
prepare_prefixes_and_suffixes(toolset, target_os)
#
# Methods that change the working directory's content.
#
def set_tree(self, tree_location):
# It is not possible to remove the current directory.
d = os.getcwd()
os.chdir(os.path.dirname(self.workdir))
shutil.rmtree(self.workdir, ignore_errors=False)
if not os.path.isabs(tree_location):
tree_location = os.path.join(self.original_workdir, tree_location)
shutil.copytree(tree_location, self.workdir)
os.chdir(d)
def make_writable(unused, dir, entries):
for e in entries:
name = os.path.join(dir, e)
os.chmod(name, os.stat(name).st_mode | 0222)
os.path.walk(".", make_writable, None)
def write(self, file, content, wait=True):
nfile = self.native_file_name(file)
self.__makedirs(os.path.dirname(nfile), wait)
f = open(nfile, "wb")
try:
f.write(content)
finally:
f.close()
self.__ensure_newer_than_last_build(nfile)
def copy(self, src, dst):
try:
self.write(dst, self.read(src, binary=True))
except:
self.fail_test(1)
def copy_preserving_timestamp(self, src, dst):
src_name = self.native_file_name(src)
dst_name = self.native_file_name(dst)
stats = os.stat(src_name)
self.write(dst, self.__read(src, binary=True))
os.utime(dst_name, (stats.st_atime, stats.st_mtime))
def touch(self, names, wait=True):
if names.__class__ is str:
names = [names]
for name in names:
path = self.native_file_name(name)
if wait:
self.__ensure_newer_than_last_build(path)
else:
os.utime(path, None)
def rm(self, names):
if not type(names) == types.ListType:
names = [names]
if names == ["."]:
# If we are deleting the entire workspace, there is no need to wait
# for a clock tick.
self.last_build_timestamp = 0
# Avoid attempts to remove the current directory.
os.chdir(self.original_workdir)
for name in names:
n = glob.glob(self.native_file_name(name))
if n: n = n[0]
if not n:
n = self.glob_file(name.replace("$toolset", self.toolset + "*")
)
if n:
if os.path.isdir(n):
shutil.rmtree(n, ignore_errors=False)
else:
os.unlink(n)
# Create working dir root again in case we removed it.
if not os.path.exists(self.workdir):
os.mkdir(self.workdir)
os.chdir(self.workdir)
def expand_toolset(self, name):
"""
Expands $toolset placeholder in the given file to the name of the
toolset currently being tested.
"""
self.write(name, self.read(name).replace("$toolset", self.toolset))
def dump_stdio(self):
annotation("STDOUT", self.stdout())
annotation("STDERR", self.stderr())
def run_build_system(self, extra_args=None, subdir="", stdout=None,
stderr="", status=0, match=None, pass_toolset=None,
use_test_config=None, ignore_toolset_requirements=None,
expected_duration=None, **kw):
assert extra_args.__class__ is not str
if os.path.isabs(subdir):
print("You must pass a relative directory to subdir <%s>." % subdir
)
return
self.previous_tree, dummy = tree.build_tree(self.workdir)
self.wait_for_time_change_since_last_build()
if match is None:
match = self.match
if pass_toolset is None:
pass_toolset = self.pass_toolset
if use_test_config is None:
use_test_config = self.use_test_config
if ignore_toolset_requirements is None:
ignore_toolset_requirements = self.ignore_toolset_requirements
try:
kw["program"] = []
kw["program"] += self.program
if extra_args:
kw["program"] += extra_args
if stdout is None and not any(a.startswith("-d") for a in kw["program"]):
kw["program"] += self.verbosity
if pass_toolset:
kw["program"].append("toolset=" + self.toolset)
if use_test_config:
kw["program"].append('--test-config="%s"' % os.path.join(
self.original_workdir, "test-config.jam"))
if ignore_toolset_requirements:
kw["program"].append("--ignore-toolset-requirements")
if "--python" in sys.argv:
# -z disables Python optimization mode.
# this enables type checking (all assert
# and if __debug__ statements).
kw["program"].extend(["--python", "-z"])
if "--stacktrace" in sys.argv:
kw["program"].append("--stacktrace")
kw["chdir"] = subdir
self.last_program_invocation = kw["program"]
build_time_start = time.time()
apply(TestCmd.TestCmd.run, [self], kw)
build_time_finish = time.time()
except:
self.dump_stdio()
raise
old_last_build_timestamp = self.last_build_timestamp
self.tree, self.last_build_timestamp = tree.build_tree(self.workdir)
self.difference = tree.tree_difference(self.previous_tree, self.tree)
if self.difference.empty():
# If nothing has been changed by this build and sufficient time has
# passed since the last build that actually changed something,
# there is no need to wait for touched or newly created files to
# start getting newer timestamps than the currently existing ones.
self.last_build_timestamp = old_last_build_timestamp
self.difference.ignore_directories()
self.unexpected_difference = copy.deepcopy(self.difference)
if (status and self.status) is not None and self.status != status:
expect = ""
if status != 0:
expect = " (expected %d)" % status
annotation("failure", '"%s" returned %d%s' % (kw["program"],
self.status, expect))
annotation("reason", "unexpected status returned by bjam")
self.fail_test(1)
if stdout is not None and not match(self.stdout(), stdout):
stdout_test = match(self.stdout(), stdout)
annotation("failure", "Unexpected stdout")
annotation("Expected STDOUT", stdout)
annotation("Actual STDOUT", self.stdout())
stderr = self.stderr()
if stderr:
annotation("STDERR", stderr)
self.maybe_do_diff(self.stdout(), stdout, stdout_test)
self.fail_test(1, dump_stdio=False)
# Intel tends to produce some messages to stderr which make tests fail.
intel_workaround = re.compile("^xi(link|lib): executing.*\n", re.M)
actual_stderr = re.sub(intel_workaround, "", self.stderr())
if stderr is not None and not match(actual_stderr, stderr):
stderr_test = match(actual_stderr, stderr)
annotation("failure", "Unexpected stderr")
annotation("Expected STDERR", stderr)
annotation("Actual STDERR", self.stderr())
annotation("STDOUT", self.stdout())
self.maybe_do_diff(actual_stderr, stderr, stderr_test)
self.fail_test(1, dump_stdio=False)
if expected_duration is not None:
actual_duration = build_time_finish - build_time_start
if actual_duration > expected_duration:
print("Test run lasted %f seconds while it was expected to "
"finish in under %f seconds." % (actual_duration,
expected_duration))
self.fail_test(1, dump_stdio=False)
self.__ignore_junk()
def glob_file(self, name):
name = self.adjust_name(name)
result = None
if hasattr(self, "difference"):
for f in (self.difference.added_files +
self.difference.modified_files +
self.difference.touched_files):
if fnmatch.fnmatch(f, name):
result = self.__native_file_name(f)
break
if not result:
result = glob.glob(self.__native_file_name(name))
if result:
result = result[0]
return result
def __read(self, name, binary=False):
try:
openMode = "r"
if binary:
openMode += "b"
else:
openMode += "U"
f = open(name, openMode)
result = f.read()
f.close()
return result
except:
annotation("failure", "Could not open '%s'" % name)
self.fail_test(1)
return ""
def read(self, name, binary=False):
name = self.glob_file(name)
return self.__read(name, binary=binary)
def read_and_strip(self, name):
if not self.glob_file(name):
return ""
f = open(self.glob_file(name), "rb")
lines = f.readlines()
f.close()
result = "\n".join(x.rstrip() for x in lines)
if lines and lines[-1][-1] != "\n":
return result + "\n"
return result
def fail_test(self, condition, dump_difference=True, dump_stdio=True,
dump_stack=True):
if not condition:
return
if dump_difference and hasattr(self, "difference"):
f = StringIO.StringIO()
self.difference.pprint(f)
annotation("changes caused by the last build command",
f.getvalue())
if dump_stdio:
self.dump_stdio()
if "--preserve" in sys.argv:
print
print "*** Copying the state of working dir into 'failed_test' ***"
print
path = os.path.join(self.original_workdir, "failed_test")
if os.path.isdir(path):
shutil.rmtree(path, ignore_errors=False)
elif os.path.exists(path):
raise "Path " + path + " already exists and is not a directory"
shutil.copytree(self.workdir, path)
print "The failed command was:"
print " ".join(self.last_program_invocation)
if dump_stack:
annotate_stack_trace()
sys.exit(1)
# A number of methods below check expectations with actual difference
# between directory trees before and after a build. All the 'expect*'
# methods require exact names to be passed. All the 'ignore*' methods allow
# wildcards.
# All names can be either a string or a list of strings.
def expect_addition(self, names):
for name in self.adjust_names(names):
try:
glob_remove(self.unexpected_difference.added_files, name)
except:
annotation("failure", "File %s not added as expected" % name)
self.fail_test(1)
def ignore_addition(self, wildcard):
self.__ignore_elements(self.unexpected_difference.added_files,
wildcard)
def expect_removal(self, names):
for name in self.adjust_names(names):
try:
glob_remove(self.unexpected_difference.removed_files, name)
except:
annotation("failure", "File %s not removed as expected" % name)
self.fail_test(1)
def ignore_removal(self, wildcard):
self.__ignore_elements(self.unexpected_difference.removed_files,
wildcard)
def expect_modification(self, names):
for name in self.adjust_names(names):
try:
glob_remove(self.unexpected_difference.modified_files, name)
except:
annotation("failure", "File %s not modified as expected" %
name)
self.fail_test(1)
def ignore_modification(self, wildcard):
self.__ignore_elements(self.unexpected_difference.modified_files,
wildcard)
def expect_touch(self, names):
d = self.unexpected_difference
for name in self.adjust_names(names):
# We need to check both touched and modified files. The reason is
# that:
# (1) Windows binaries such as obj, exe or dll files have slight
# differences even with identical inputs due to Windows PE
# format headers containing an internal timestamp.
# (2) Intel's compiler for Linux has the same behaviour.
filesets = [d.modified_files, d.touched_files]
while filesets:
try:
glob_remove(filesets[-1], name)
break
except ValueError:
filesets.pop()
if not filesets:
annotation("failure", "File %s not touched as expected" % name)
self.fail_test(1)
def ignore_touch(self, wildcard):
self.__ignore_elements(self.unexpected_difference.touched_files,
wildcard)
def ignore(self, wildcard):
self.ignore_addition(wildcard)
self.ignore_removal(wildcard)
self.ignore_modification(wildcard)
self.ignore_touch(wildcard)
def expect_nothing(self, names):
for name in self.adjust_names(names):
if name in self.difference.added_files:
annotation("failure",
"File %s added, but no action was expected" % name)
self.fail_test(1)
if name in self.difference.removed_files:
annotation("failure",
"File %s removed, but no action was expected" % name)
self.fail_test(1)
pass
if name in self.difference.modified_files:
annotation("failure",
"File %s modified, but no action was expected" % name)
self.fail_test(1)
if name in self.difference.touched_files:
annotation("failure",
"File %s touched, but no action was expected" % name)
self.fail_test(1)
def __ignore_junk(self):
# Not totally sure about this change, but I do not see a good
# alternative.
if windows:
self.ignore("*.ilk") # MSVC incremental linking files.
self.ignore("*.pdb") # MSVC program database files.
self.ignore("*.rsp") # Response files.
self.ignore("*.tds") # Borland debug symbols.
self.ignore("*.manifest") # MSVC DLL manifests.
self.ignore("bin/standalone/msvc/*/msvc-setup.bat")
# Debug builds of bjam built with gcc produce this profiling data.
self.ignore("gmon.out")
self.ignore("*/gmon.out")
# Boost Build's 'configure' functionality (unfinished at the time)
# produces this file.
self.ignore("bin/config.log")
self.ignore("bin/project-cache.jam")
# Compiled Python files created when running Python based Boost Build.
self.ignore("*.pyc")
# OSX/Darwin files and dirs.
self.ignore("*.dSYM/*")
def expect_nothing_more(self):
if not self.unexpected_difference.empty():
annotation("failure", "Unexpected changes found")
output = StringIO.StringIO()
self.unexpected_difference.pprint(output)
annotation("unexpected changes", output.getvalue())
self.fail_test(1)
def expect_output_lines(self, lines, expected=True):
self.__expect_lines(self.stdout(), lines, expected)
def expect_content_lines(self, filename, line, expected=True):
self.__expect_lines(self.read_and_strip(filename), line, expected)
def expect_content(self, name, content, exact=False):
actual = self.read(name)
content = content.replace("$toolset", self.toolset + "*")
matched = False
if exact:
matched = fnmatch.fnmatch(actual, content)
else:
def sorted_(x):
x.sort(lambda x, y: cmp(x.lower().replace("\\","/"), y.lower().replace("\\","/")))
return x
actual_ = map(lambda x: sorted_(x.split()), actual.splitlines())
content_ = map(lambda x: sorted_(x.split()), content.splitlines())
if len(actual_) == len(content_):
matched = map(
lambda x, y: map(lambda n, p: fnmatch.fnmatch(n, p), x, y),
actual_, content_)
matched = reduce(
lambda x, y: x and reduce(
lambda a, b: a and b,
y, True),
matched, True)
if not matched:
print "Expected:\n"
print content
print "Got:\n"
print actual
self.fail_test(1)
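    # Illustrative call (hypothetical file name and content):
    #   self.expect_content("hello.txt", "Hello *")
    # "$toolset" in the expected content expands to the current toolset name
    # plus "*"; unless exact is set, each whitespace separated token is matched
    # with fnmatch-style wildcards, ignoring token order within a line.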
def maybe_do_diff(self, actual, expected, result=None):
if os.environ.get("DO_DIFF"):
e = tempfile.mktemp("expected")
a = tempfile.mktemp("actual")
f = open(e, "w")
f.write(expected)
f.close()
f = open(a, "w")
f.write(actual)
f.close()
print("DIFFERENCE")
# Current diff should return 1 to indicate 'different input files'
# but some older diff versions may return 0 and depending on the
# exact Python/OS platform version, os.system() call may gobble up
# the external process's return code and return 0 itself.
if os.system('diff -u "%s" "%s"' % (e, a)) not in [0, 1]:
print('Unable to compute difference: diff -u "%s" "%s"' % (e, a
))
os.unlink(e)
os.unlink(a)
elif type(result) is TestCmd.MatchError:
print(result.message)
else:
print("Set environmental variable 'DO_DIFF' to examine the "
"difference.")
# Internal methods.
def adjust_lib_name(self, name):
global lib_prefix
global dll_prefix
result = name
pos = name.rfind(".")
if pos != -1:
suffix = name[pos:]
if suffix == ".lib":
(head, tail) = os.path.split(name)
if lib_prefix:
tail = lib_prefix + tail
result = os.path.join(head, tail)
elif suffix == ".dll":
(head, tail) = os.path.split(name)
if dll_prefix:
tail = dll_prefix + tail
result = os.path.join(head, tail)
# If we want to use this name in a Jamfile, we better convert \ to /,
# as otherwise we would have to quote \.
result = result.replace("\\", "/")
return result
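    # Illustrative example (hypothetical values, assuming lib_prefix == "lib"
    # and a name ending in ".lib"):
    #   adjust_lib_name("bin/foo.lib") -> "bin/libfoo.lib"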
def adjust_suffix(self, name):
if not self.translate_suffixes:
return name
pos = name.rfind(".")
if pos == -1:
return name
suffix = name[pos:]
return name[:pos] + suffixes.get(suffix, suffix)
    # Accepts either a string or a list of strings and returns a list of
# strings. Adjusts suffixes on all names.
def adjust_names(self, names):
if names.__class__ is str:
names = [names]
r = map(self.adjust_lib_name, names)
r = map(self.adjust_suffix, r)
r = map(lambda x, t=self.toolset: x.replace("$toolset", t + "*"), r)
return r
def adjust_name(self, name):
return self.adjust_names(name)[0]
def __native_file_name(self, name):
return os.path.normpath(os.path.join(self.workdir, *name.split("/")))
def native_file_name(self, name):
return self.__native_file_name(self.adjust_name(name))
def wait_for_time_change(self, path, touch):
"""
Wait for newly assigned file system modification timestamps for the
given path to become large enough for the timestamp difference to be
correctly recognized by both this Python based testing framework and
the Boost Jam executable being tested. May optionally touch the given
path to set its modification timestamp to the new value.
"""
self.__wait_for_time_change(path, touch, last_build_time=False)
def wait_for_time_change_since_last_build(self):
"""
Wait for newly assigned file system modification timestamps to
become large enough for the timestamp difference to be
correctly recognized by the Python based testing framework.
Does not care about Jam's timestamp resolution, since we
only need this to detect touched files.
"""
if self.last_build_timestamp:
timestamp_file = "timestamp-3df2f2317e15e4a9"
open(timestamp_file, "wb").close()
self.__wait_for_time_change_impl(timestamp_file,
self.last_build_timestamp,
self.__python_timestamp_resolution(timestamp_file, 0), 0)
os.unlink(timestamp_file)
def __build_timestamp_resolution(self):
"""
Returns the minimum path modification timestamp resolution supported
by the used Boost Jam executable.
"""
dir = tempfile.mkdtemp("bjam_version_info")
try:
jam_script = "timestamp_resolution.jam"
f = open(os.path.join(dir, jam_script), "w")
try:
f.write("EXIT $(JAM_TIMESTAMP_RESOLUTION) : 0 ;")
finally:
f.close()
p = subprocess.Popen([self.program[0], "-d0", "-f%s" % jam_script],
stdout=subprocess.PIPE, cwd=dir, universal_newlines=True)
out, err = p.communicate()
finally:
shutil.rmtree(dir, ignore_errors=False)
if p.returncode != 0:
raise TestEnvironmentError("Unexpected return code (%s) when "
"detecting Boost Jam's minimum supported path modification "
"timestamp resolution version information." % p.returncode)
if err:
raise TestEnvironmentError("Unexpected error output (%s) when "
"detecting Boost Jam's minimum supported path modification "
"timestamp resolution version information." % err)
r = re.match("([0-9]{2}):([0-9]{2}):([0-9]{2}\\.[0-9]{9})$", out)
if not r:
# Older Boost Jam versions did not report their minimum supported
# path modification timestamp resolution and did not actually
# support path modification timestamp resolutions finer than 1
# second.
# TODO: Phase this support out to avoid such fallback code from
# possibly covering up other problems.
return 1
if r.group(1) != "00" or r.group(2) != "00": # hours, minutes
raise TestEnvironmentError("Boost Jam with too coarse minimum "
"supported path modification timestamp resolution (%s:%s:%s)."
% (r.group(1), r.group(2), r.group(3)))
return float(r.group(3)) # seconds.nanoseconds
def __ensure_newer_than_last_build(self, path):
"""
Updates the given path's modification timestamp after waiting for the
newly assigned file system modification timestamp to become large
enough for the timestamp difference between it and the last build
timestamp to be correctly recognized by both this Python based testing
framework and the Boost Jam executable being tested. Does nothing if
there is no 'last build' information available.
"""
if self.last_build_timestamp:
self.__wait_for_time_change(path, touch=True, last_build_time=True)
def __expect_lines(self, data, lines, expected):
"""
Checks whether the given data contains the given lines.
Data may be specified as a single string containing text lines
separated by newline characters.
Lines may be specified in any of the following forms:
* Single string containing text lines separated by newlines - the
given lines are searched for in the given data without any extra
data lines between them.
* Container of strings containing text lines separated by newlines
- the given lines are searched for in the given data with extra
data lines allowed between lines belonging to different strings.
* Container of strings containing text lines separated by newlines
and containers containing strings - the same as above with the
internal containers containing strings being interpreted as if
all their content was joined together into a single string
separated by newlines.
A newline at the end of any multi-line lines string is interpreted as
        an expected extra trailing empty line.
"""
# str.splitlines() trims at most one trailing newline while we want the
# trailing newline to indicate that there should be an extra empty line
# at the end.
splitlines = lambda x : (x + "\n").splitlines()
if data is None:
data = []
elif data.__class__ is str:
data = splitlines(data)
if lines.__class__ is str:
lines = [splitlines(lines)]
else:
expanded = []
for x in lines:
if x.__class__ is str:
x = splitlines(x)
expanded.append(x)
lines = expanded
if _contains_lines(data, lines) != bool(expected):
output = []
if expected:
output = ["Did not find expected lines:"]
else:
output = ["Found unexpected lines:"]
first = True
for line_sequence in lines:
if line_sequence:
if first:
first = False
else:
output.append("...")
output.extend(" > " + line for line in line_sequence)
output.append("in output:")
output.extend(" > " + line for line in data)
annotation("failure", "\n".join(output))
self.fail_test(1)
def __ignore_elements(self, list, wildcard):
"""Removes in-place 'list' elements matching the given 'wildcard'."""
list[:] = filter(lambda x, w=wildcard: not fnmatch.fnmatch(x, w), list)
def __makedirs(self, path, wait):
"""
Creates a folder with the given path, together with any missing
parent folders. If WAIT is set, makes sure any newly created folders
have modification timestamps newer than the ones left behind by the
last build run.
"""
try:
if wait:
stack = []
while path and path not in stack and not os.path.isdir(path):
stack.append(path)
path = os.path.dirname(path)
while stack:
path = stack.pop()
os.mkdir(path)
self.__ensure_newer_than_last_build(path)
else:
os.makedirs(path)
except Exception:
pass
def __python_timestamp_resolution(self, path, minimum_resolution):
"""
Returns the modification timestamp resolution for the given path
supported by the used Python interpreter/OS/filesystem combination.
Will not check for resolutions less than the given minimum value. Will
change the path's modification timestamp in the process.
Return values:
0 - nanosecond resolution supported
positive decimal - timestamp resolution in seconds
"""
# Note on Python's floating point timestamp support:
# Python interpreter versions prior to Python 2.3 did not support
# floating point timestamps. Versions 2.3 through 3.3 may or may not
# support it depending on the configuration (may be toggled by calling
# os.stat_float_times(True/False) at program startup, disabled by
# default prior to Python 2.5 and enabled by default since). Python 3.3
# deprecated this configuration and 3.4 removed support for it after
# which floating point timestamps are always supported.
ver = sys.version_info[0:2]
python_nanosecond_support = ver >= (3, 4) or (ver >= (2, 3) and
os.stat_float_times())
# Minimal expected floating point difference used to account for
# possible imprecise floating point number representations. We want
# this number to be small (at least smaller than 0.0001) but still
# large enough that we can be sure that increasing a floating point
# value by 2 * eta guarantees the value read back will be increased by
# at least eta.
eta = 0.00005
stats_orig = os.stat(path)
def test_time(diff):
"""Returns whether a timestamp difference is detectable."""
os.utime(path, (stats_orig.st_atime, stats_orig.st_mtime + diff))
return os.stat(path).st_mtime > stats_orig.st_mtime + eta
# Test for nanosecond timestamp resolution support.
if not minimum_resolution and python_nanosecond_support:
if test_time(2 * eta):
return 0
# Detect the filesystem timestamp resolution. Note that there is no
# need to make this code 'as fast as possible' as, this function gets
# called before having to sleep until the next detectable modification
# timestamp value and that, since we already know nanosecond resolution
# is not supported, will surely take longer than whatever we do here to
# detect this minimal detectable modification timestamp resolution.
step = 0.1
if not python_nanosecond_support:
# If Python does not support nanosecond timestamp resolution we
# know the minimum possible supported timestamp resolution is 1
# second.
minimum_resolution = max(1, minimum_resolution)
index = max(1, int(minimum_resolution / step))
while step * index < minimum_resolution:
# Floating point number representation errors may cause our
# initially calculated start index to be too small if calculated
# directly.
index += 1
while True:
# Do not simply add up the steps to avoid cumulative floating point
# number representation errors.
next = step * index
if next > 10:
raise TestEnvironmentError("File systems with too coarse "
"modification timestamp resolutions not supported.")
if test_time(next):
return next
index += 1
def __wait_for_time_change(self, path, touch, last_build_time):
"""
Wait until a newly assigned file system modification timestamp for
the given path is large enough for the timestamp difference between it
and the last build timestamp or the path's original file system
modification timestamp (depending on the last_build_time flag) to be
correctly recognized by both this Python based testing framework and
the Boost Jam executable being tested. May optionally touch the given
path to set its modification timestamp to the new value.
"""
assert self.last_build_timestamp or not last_build_time
stats_orig = os.stat(path)
if last_build_time:
start_time = self.last_build_timestamp
else:
start_time = stats_orig.st_mtime
build_resolution = self.__build_timestamp_resolution()
assert build_resolution >= 0
# Check whether the current timestamp is already new enough.
if stats_orig.st_mtime > start_time and (not build_resolution or
stats_orig.st_mtime >= start_time + build_resolution):
return
resolution = self.__python_timestamp_resolution(path, build_resolution)
assert resolution >= build_resolution
        self.__wait_for_time_change_impl(path, start_time, resolution,
            build_resolution, last_build_time)
if not touch:
os.utime(path, (stats_orig.st_atime, stats_orig.st_mtime))
    def __wait_for_time_change_impl(self, path, start_time, resolution,
            build_resolution, last_build_time=False):
# Implementation notes:
# * Theoretically time.sleep() API might get interrupted too soon
# (never actually encountered).
# * We encountered cases where we sleep just long enough for the
        #   filesystem's modification timestamp to change to the desired value,
# but after waking up, the read timestamp is still just a tiny bit
# too small (encountered on Windows). This is most likely caused by
# imprecise floating point timestamp & sleep interval representation
# used by Python. Note though that we never encountered a case where
# more than one additional tiny sleep() call was needed to remedy
# the situation.
# * We try to wait long enough for the timestamp to change, but do not
# want to waste processing time by waiting too long. The main
# problem is that when we have a coarse resolution, the actual times
# get rounded and we do not know the exact sleep time needed for the
# difference between two such times to pass. E.g. if we have a 1
# second resolution and the original and the current file timestamps
# are both 10 seconds then it could be that the current time is
# 10.99 seconds and that we can wait for just one hundredth of a
# second for the current file timestamp to reach its next value, and
# using a longer sleep interval than that would just be wasting
# time.
while True:
os.utime(path, None)
c = os.stat(path).st_mtime
if resolution:
if c > start_time and (not build_resolution or c >= start_time
+ build_resolution):
break
if c <= start_time - resolution:
# Move close to the desired timestamp in one sleep, but not
# close enough for timestamp rounding to potentially cause
# us to wait too long.
if start_time - c > 5:
if last_build_time:
error_message = ("Last build time recorded as "
"being a future event, causing a too long "
"wait period. Something must have played "
"around with the system clock.")
else:
error_message = ("Original path modification "
"timestamp set to far into the future or "
"something must have played around with the "
"system clock, causing a too long wait "
"period.\nPath: '%s'" % path)
                        raise TestEnvironmentError(error_message)
_sleep(start_time - c)
else:
# We are close to the desired timestamp so take baby sleeps
# to avoid sleeping too long.
_sleep(max(0.01, resolution / 10))
else:
if c > start_time:
break
_sleep(max(0.01, start_time - c))
class List:
def __init__(self, s=""):
elements = []
if s.__class__ is str:
# Have to handle escaped spaces correctly.
elements = s.replace("\ ", "\001").split()
else:
elements = s
self.l = [e.replace("\001", " ") for e in elements]
def __len__(self):
return len(self.l)
def __getitem__(self, key):
return self.l[key]
def __setitem__(self, key, value):
self.l[key] = value
def __delitem__(self, key):
del self.l[key]
def __str__(self):
return str(self.l)
def __repr__(self):
return "%s.List(%r)" % (self.__module__, " ".join(self.l))
def __mul__(self, other):
result = List()
if not isinstance(other, List):
other = List(other)
for f in self:
for s in other:
result.l.append(f + s)
return result
def __rmul__(self, other):
if not isinstance(other, List):
other = List(other)
return List.__mul__(other, self)
def __add__(self, other):
result = List()
result.l = self.l[:] + other.l[:]
return result
def _contains_lines(data, lines):
data_line_count = len(data)
expected_line_count = reduce(lambda x, y: x + len(y), lines, 0)
index = 0
for expected in lines:
if expected_line_count > data_line_count - index:
return False
expected_line_count -= len(expected)
index = _match_line_sequence(data, index, data_line_count -
expected_line_count, expected)
if index < 0:
return False
return True
def _match_line_sequence(data, start, end, lines):
if not lines:
return start
for index in xrange(start, end - len(lines) + 1):
data_index = index
for expected in lines:
if not fnmatch.fnmatch(data[data_index], expected):
                break
data_index += 1
else:
return data_index
return -1
def _sleep(delay):
if delay > 5:
raise TestEnvironmentError("Test environment error: sleep period of "
"more than 5 seconds requested. Most likely caused by a file with "
"its modification timestamp set to sometime in the future.")
time.sleep(delay)
###############################################################################
#
# Initialization.
#
###############################################################################
# Make os.stat() return file modification times as floats instead of integers
# to get the best possible file timestamp resolution available. The exact
# resolution depends on the underlying file system and the Python os.stat()
# implementation. The better the resolution we achieve, the shorter we need to
# wait for files we create to start getting new timestamps.
#
# Additional notes:
# * os.stat_float_times() function first introduced in Python 2.3. and
# suggested for deprecation in Python 3.3.
# * On Python versions 2.5+ we do not need to do this as there os.stat()
# returns floating point file modification times by default.
# * Windows CPython implementations prior to version 2.5 do not support file
# modification timestamp resolutions of less than 1 second no matter whether
# these timestamps are returned as integer or floating point values.
# * Python documentation states that this should be set in a program's
# __main__ module to avoid affecting other libraries that might not be ready
# to support floating point timestamps. Since we use no such external
# libraries, we ignore this warning to make it easier to enable this feature
# in both our single & multiple-test scripts.
if (2, 3) <= sys.version_info < (2, 5) and not os.stat_float_times():
os.stat_float_times(True)
# Quickie tests. Should use doctest instead.
if __name__ == "__main__":
assert str(List("foo bar") * "/baz") == "['foo/baz', 'bar/baz']"
assert repr("foo/" * List("bar baz")) == "__main__.List('foo/bar foo/baz')"
assert _contains_lines([], [])
assert _contains_lines([], [[]])
assert _contains_lines([], [[], []])
assert _contains_lines([], [[], [], []])
assert not _contains_lines([], [[""]])
assert not _contains_lines([], [["a"]])
assert _contains_lines([""], [])
assert _contains_lines(["a"], [])
assert _contains_lines(["a", "b"], [])
assert _contains_lines(["a", "b"], [[], [], []])
assert _contains_lines([""], [[""]])
assert not _contains_lines([""], [["a"]])
assert not _contains_lines(["a"], [[""]])
assert _contains_lines(["a", "", "b", ""], [["a"]])
assert _contains_lines(["a", "", "b", ""], [[""]])
assert _contains_lines(["a", "", "b"], [["b"]])
assert not _contains_lines(["a", "b"], [[""]])
assert not _contains_lines(["a", "", "b", ""], [["c"]])
assert _contains_lines(["a", "", "b", "x"], [["x"]])
data = ["1", "2", "3", "4", "5", "6", "7", "8", "9"]
assert _contains_lines(data, [["1", "2"]])
assert not _contains_lines(data, [["2", "1"]])
assert not _contains_lines(data, [["1", "3"]])
assert not _contains_lines(data, [["1", "3"]])
assert _contains_lines(data, [["1"], ["2"]])
assert _contains_lines(data, [["1"], [], [], [], ["2"]])
assert _contains_lines(data, [["1"], ["3"]])
assert not _contains_lines(data, [["3"], ["1"]])
assert _contains_lines(data, [["3"], ["7"], ["8"]])
assert not _contains_lines(data, [["1"], ["3", "5"]])
assert not _contains_lines(data, [["1"], [""], ["5"]])
assert not _contains_lines(data, [["1"], ["5"], ["3"]])
assert not _contains_lines(data, [["1"], ["5", "3"]])
assert not _contains_lines(data, [[" 3"]])
assert not _contains_lines(data, [["3 "]])
assert not _contains_lines(data, [["3", ""]])
assert not _contains_lines(data, [["", "3"]])
print("tests passed")
| nawawi/poedit | deps/boost/tools/build/test/BoostBuild.py | Python | mit | 53,880 |
import getopt
import os
import sys
import logging
from pcs import (
settings,
usage,
utils,
)
from pcs.cli.common import (
capabilities,
completion,
errors,
parse_args,
routing,
)
from pcs.cli.reports import process_library_reports, output
from pcs.cli.routing import (
acl,
alert,
booth,
client,
cluster,
config,
constraint,
dr,
host,
node,
pcsd,
prop,
qdevice,
quorum,
resource,
status,
stonith,
tag,
)
from pcs.lib.errors import LibraryError
def _non_root_run(argv_cmd):
"""
    This function runs commands which have to be run as root on behalf of
    users who are not root. If such a command needs to be run as root, it is
    sent to the local pcsd and this process then exits.
"""
# matching the commands both in here and in pcsd expects -o and --options
# to be at the end of a command
argv_and_options = argv_cmd[:]
for option, value in utils.pcs_options.items():
if parse_args.is_option_expecting_value(option):
argv_and_options.extend([option, value])
else:
argv_and_options.append(option)
# specific commands need to be run under root account, pass them to pcsd
# don't forget to allow each command in pcsd.rb in "post /run_pcs do"
root_command_list = [
["cluster", "auth", "..."],
["cluster", "corosync", "..."],
["cluster", "destroy", "..."],
["cluster", "disable", "..."],
["cluster", "enable", "..."],
["cluster", "node", "..."],
["cluster", "pcsd-status", "..."], # TODO deprecated, remove command
["cluster", "start", "..."],
["cluster", "stop", "..."],
["cluster", "sync", "..."],
# ['config', 'restore', '...'], # handled in config.config_restore
["host", "auth", "..."],
["host", "deauth", "..."],
["pcsd", "deauth", "..."],
["pcsd", "status", "..."],
["pcsd", "sync-certificates"],
["quorum", "device", "status", "..."],
["quorum", "status", "..."],
["status"],
["status", "corosync", "..."],
["status", "pcsd", "..."],
["status", "quorum", "..."],
["status", "status", "..."],
]
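    # Matching example (hypothetical invocation): "pcs cluster start --all"
    # produces argv_and_options == ["cluster", "start", "--all"], which matches
    # ["cluster", "start", "..."] above because the trailing "..." entry makes
    # the comparison prefix-based.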
for root_cmd in root_command_list:
if (argv_and_options == root_cmd) or (
root_cmd[-1] == "..."
and argv_and_options[: len(root_cmd) - 1] == root_cmd[:-1]
):
# handle interactivity of 'pcs cluster auth'
if argv_and_options[0:2] in [["cluster", "auth"], ["host", "auth"]]:
if "-u" not in utils.pcs_options:
username = utils.get_terminal_input("Username: ")
argv_and_options.extend(["-u", username])
if "-p" not in utils.pcs_options:
password = utils.get_terminal_password()
argv_and_options.extend(["-p", password])
# call the local pcsd
err_msgs, exitcode, std_out, std_err = utils.call_local_pcsd(
argv_and_options
)
if err_msgs:
for msg in err_msgs:
utils.err(msg, False)
sys.exit(1)
if std_out.strip():
print(std_out)
if std_err.strip():
sys.stderr.write(std_err)
sys.exit(exitcode)
usefile = False
filename = ""
def main(argv=None):
# pylint: disable=global-statement
# pylint: disable=too-many-branches
# pylint: disable=too-many-locals
# pylint: disable=too-many-statements
if completion.has_applicable_environment(os.environ):
print(
completion.make_suggestions(
os.environ, usage.generate_completion_tree_from_usage()
)
)
sys.exit()
argv = argv if argv else sys.argv[1:]
utils.subprocess_setup()
global filename, usefile
utils.pcs_options = {}
# we want to support optional arguments for --wait, so if an argument
# is specified with --wait (ie. --wait=30) then we use them
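    # For example, a hypothetical "--wait=30" argument is rewritten below to a
    # plain "--wait" with waitsecs set to "30"; a bare "--wait" leaves waitsecs
    # as None.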
waitsecs = None
new_argv = []
for arg in argv:
if arg.startswith("--wait="):
tempsecs = arg.replace("--wait=", "")
if tempsecs:
waitsecs = tempsecs
arg = "--wait"
new_argv.append(arg)
argv = new_argv
try:
if "--" in argv:
pcs_options, argv = getopt.gnu_getopt(
argv, parse_args.PCS_SHORT_OPTIONS, parse_args.PCS_LONG_OPTIONS
)
else:
# DEPRECATED
# TODO remove
# We want to support only the -- version
(
args_without_negative_nums,
args_filtered_out,
) = parse_args.filter_out_non_option_negative_numbers(argv)
if args_filtered_out:
options_str = "', '".join(args_filtered_out)
output.warn(
f"Using '{options_str}' without '--' is deprecated, those "
"parameters will be considered position independent "
"options in future pcs versions"
)
pcs_options, dummy_argv = getopt.gnu_getopt(
args_without_negative_nums,
parse_args.PCS_SHORT_OPTIONS,
parse_args.PCS_LONG_OPTIONS,
)
argv = parse_args.filter_out_options(argv)
except getopt.GetoptError as err:
usage.main()
print(err)
if err.opt in {"V", "clone", "device", "watchdog"}:
# Print error messages which point users to the changes section in
# pcs manpage.
# TODO remove
# To be removed in the next significant version.
print(f"Hint: {errors.HINT_SYNTAX_CHANGE}")
sys.exit(1)
full = False
for option, dummy_value in pcs_options:
if option == "--full":
full = True
break
for opt, val in pcs_options:
        if opt not in utils.pcs_options:
utils.pcs_options[opt] = val
else:
            # The option appeared more than once on the command line, which
            # isn't valid
utils.err("%s can only be used once" % opt)
if opt in ("-h", "--help"):
if not argv:
usage.main()
sys.exit()
else:
argv = [argv[0], "help"] + argv[1:]
elif opt == "-f":
usefile = True
filename = val
utils.usefile = usefile
utils.filename = filename
elif opt == "--corosync_conf":
settings.corosync_conf_file = val
elif opt == "--version":
print(settings.pcs_version)
if full:
print(
" ".join(
sorted(
[
feat["id"]
for feat in capabilities.get_pcs_capabilities()
]
)
)
)
sys.exit()
elif opt == "--fullhelp":
usage.full_usage()
sys.exit()
elif opt == "--wait":
utils.pcs_options[opt] = waitsecs
elif opt == "--request-timeout":
request_timeout_valid = False
try:
timeout = int(val)
if timeout > 0:
utils.pcs_options[opt] = timeout
request_timeout_valid = True
except ValueError:
pass
if not request_timeout_valid:
utils.err(
(
"'{0}' is not a valid --request-timeout value, use "
"a positive integer"
).format(val)
)
# initialize logger
logging.getLogger("pcs")
if (os.getuid() != 0) and (argv and argv[0] != "help") and not usefile:
_non_root_run(argv)
cmd_map = {
"resource": resource.resource_cmd,
"cluster": cluster.cluster_cmd,
"stonith": stonith.stonith_cmd,
"property": prop.property_cmd,
"constraint": constraint.constraint_cmd,
"acl": acl.acl_cmd,
"status": status.status_cmd,
"config": config.config_cmd,
"pcsd": pcsd.pcsd_cmd,
"node": node.node_cmd,
"quorum": quorum.quorum_cmd,
"qdevice": qdevice.qdevice_cmd,
"alert": alert.alert_cmd,
"booth": booth.booth_cmd,
"host": host.host_cmd,
"client": client.client_cmd,
"dr": dr.dr_cmd,
"tag": tag.tag_cmd,
"help": lambda lib, argv, modifiers: usage.main(),
}
try:
routing.create_router(cmd_map, [])(
utils.get_library_wrapper(), argv, utils.get_input_modifiers()
)
except LibraryError as e:
process_library_reports(e.args)
except errors.CmdLineInputError:
if argv and argv[0] in cmd_map:
usage.show(argv[0], [])
else:
usage.main()
sys.exit(1)
| feist/pcs | pcs/app.py | Python | gpl-2.0 | 9,230 |
from collections import OrderedDict
import json
from django.db import models
from django.contrib.postgres.fields import JSONField as Builtin_JSONField
from django.core.serializers.json import Serializer as Builtin_Serializer
from django.utils.encoding import smart_text
class JSONField(Builtin_JSONField):
def value_from_object(self, obj):
value = getattr(obj, self.attname)
if value:
return json.loads(value)
else:
return None
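# Hypothetical usage sketch (the model below is illustrative and not part of
# this module):
#
#     class Report(models.Model):
#         data = JSONField(null=True)
#
# With this field, value_from_object() hands serializers the json.loads() of
# the stored value instead of the raw string, or None when the field is empty.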
| Fakor/congov | web/values/models.py | Python | mit | 484 |
#!/usr/bin/env python
""" sha1Hash_test.py
Unit tests for sha1.py
"""
from crypto.hash.sha1Hash import SHA1
import unittest
import struct
assert struct.calcsize('!IIIII') == 20, '5 integers should be 20 bytes'
class SHA1_FIPS180_TestCases(unittest.TestCase):
""" SHA-1 tests from FIPS180-1 Appendix A, B and C """
def testFIPS180_1_Appendix_A(self):
""" APPENDIX A. A SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abc'
message_digest = 0xA9993E36L, 0x4706816AL, 0xBA3E2571L, 0x7850C26CL, 0x9CD0D89DL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix A test Failed'
def testFIPS180_1_Appendix_B(self):
""" APPENDIX B. A SECOND SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
message_digest = 0x84983E44L, 0x1C3BD26EL, 0xBAAE4AA1L, 0xF95129E5L, 0xE54670F1L
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix B test Failed'
def testFIPS180_1_Appendix_C(self):
""" APPENDIX C. A THIRD SAMPLE MESSAGE AND ITS MESSAGE DIGEST
Let the message be the binary-coded form of the ASCII string which consists
of 1,000,000 repetitions of "a". """
hashAlg = SHA1()
message = 1000000*'a'
message_digest = 0x34AA973CL, 0xD4C4DAA4L, 0xF61EEB2BL, 0xDBAD2731L, 0x6534016FL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix C test Failed'
def _toBlock(binaryString):
""" Convert binary string to blocks of 5 words of uint32() """
return [uint32(word) for word in struct.unpack('!IIIII', binaryString)]
def _toBString(block):
""" Convert block (5 words of 32 bits to binary string """
return ''.join([struct.pack('!I',word) for word in block])
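# Illustrative example (hypothetical value): _toBString((0x61626364,) * 5)
# returns the 20-byte string 'abcd' * 5.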
if __name__ == '__main__':
# Run the tests from the command line
unittest.main()
| realms-team/basestation-fw | libs/smartmeshsdk-REL-1.3.0.1/external_libs/cryptopy/crypto/hash/sha1Hash_test.py | Python | bsd-3-clause | 2,119 |
import json
from corehq.apps.api.models import ApiUser, PERMISSION_POST_SMS
from corehq.apps.domain.models import Domain
from corehq.apps.hqcase.utils import update_case
from corehq.apps.sms.api import (send_sms, send_sms_to_verified_number,
send_sms_with_backend, send_sms_with_backend_name)
from corehq.apps.sms.mixin import BadSMSConfigException
from corehq.apps.sms.models import (SMS, QueuedSMS,
SQLMobileBackendMapping, SQLMobileBackend, MobileBackendInvitation,
PhoneLoadBalancingMixin, BackendMap)
from corehq.apps.sms.tasks import handle_outgoing
from corehq.apps.sms.tests.util import BaseSMSTest, delete_domain_phone_numbers
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.form_processor.tests.utils import run_with_all_backends
from corehq.messaging.smsbackends.apposit.models import SQLAppositBackend
from corehq.messaging.smsbackends.grapevine.models import SQLGrapevineBackend
from corehq.messaging.smsbackends.http.models import SQLHttpBackend
from corehq.messaging.smsbackends.mach.models import SQLMachBackend
from corehq.messaging.smsbackends.megamobile.models import SQLMegamobileBackend
from corehq.messaging.smsbackends.push.models import PushBackend
from corehq.messaging.smsbackends.sislog.models import SQLSislogBackend
from corehq.messaging.smsbackends.smsgh.models import SQLSMSGHBackend
from corehq.messaging.smsbackends.telerivet.models import SQLTelerivetBackend
from corehq.messaging.smsbackends.test.models import SQLTestSMSBackend
from corehq.messaging.smsbackends.tropo.models import SQLTropoBackend
from corehq.messaging.smsbackends.twilio.models import SQLTwilioBackend
from corehq.messaging.smsbackends.unicel.models import SQLUnicelBackend, InboundParams
from corehq.messaging.smsbackends.yo.models import SQLYoBackend
from corehq.util.test_utils import create_test_case
from datetime import datetime
from dimagi.utils.couch.cache.cache_core import get_redis_client
from django.test import TestCase
from django.test.client import Client
from django.test.utils import override_settings
from mock import patch
from urllib import urlencode
class AllBackendTest(BaseSMSTest):
def setUp(self):
super(AllBackendTest, self).setUp()
self.domain_obj = Domain(name='all-backend-test')
self.domain_obj.save()
self.create_account_and_subscription(self.domain_obj.name)
self.domain_obj = Domain.get(self.domain_obj.get_id)
self.test_phone_number = '99912345'
self.unicel_backend = SQLUnicelBackend(
name='UNICEL',
is_global=True,
hq_api_id=SQLUnicelBackend.get_api_id()
)
self.unicel_backend.save()
self.mach_backend = SQLMachBackend(
name='MACH',
is_global=True,
hq_api_id=SQLMachBackend.get_api_id()
)
self.mach_backend.save()
self.tropo_backend = SQLTropoBackend(
name='TROPO',
is_global=True,
hq_api_id=SQLTropoBackend.get_api_id()
)
self.tropo_backend.save()
self.http_backend = SQLHttpBackend(
name='HTTP',
is_global=True,
hq_api_id=SQLHttpBackend.get_api_id()
)
self.http_backend.save()
self.telerivet_backend = SQLTelerivetBackend(
name='TELERIVET',
is_global=True,
hq_api_id=SQLTelerivetBackend.get_api_id()
)
self.telerivet_backend.set_extra_fields(
**dict(
webhook_secret='telerivet-webhook-secret'
)
)
self.telerivet_backend.save()
self.test_backend = SQLTestSMSBackend(
name='TEST',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
self.test_backend.save()
self.grapevine_backend = SQLGrapevineBackend(
name='GRAPEVINE',
is_global=True,
hq_api_id=SQLGrapevineBackend.get_api_id()
)
self.grapevine_backend.save()
self.twilio_backend = SQLTwilioBackend(
name='TWILIO',
is_global=True,
hq_api_id=SQLTwilioBackend.get_api_id()
)
self.twilio_backend.save()
self.megamobile_backend = SQLMegamobileBackend(
name='MEGAMOBILE',
is_global=True,
hq_api_id=SQLMegamobileBackend.get_api_id()
)
self.megamobile_backend.save()
self.smsgh_backend = SQLSMSGHBackend(
name='SMSGH',
is_global=True,
hq_api_id=SQLSMSGHBackend.get_api_id()
)
self.smsgh_backend.save()
self.apposit_backend = SQLAppositBackend(
name='APPOSIT',
is_global=True,
hq_api_id=SQLAppositBackend.get_api_id()
)
self.apposit_backend.save()
self.sislog_backend = SQLSislogBackend(
name='SISLOG',
is_global=True,
hq_api_id=SQLSislogBackend.get_api_id()
)
self.sislog_backend.save()
self.yo_backend = SQLYoBackend(
name='YO',
is_global=True,
hq_api_id=SQLYoBackend.get_api_id()
)
self.yo_backend.save()
self.push_backend = PushBackend(
name='PUSH',
is_global=True,
hq_api_id=PushBackend.get_api_id()
)
self.push_backend.save()
def _test_outbound_backend(self, backend, msg_text, mock_send):
SQLMobileBackendMapping.set_default_domain_backend(self.domain_obj.name, backend)
send_sms(self.domain_obj.name, None, self.test_phone_number, msg_text)
sms = SMS.objects.get(
domain=self.domain_obj.name,
direction='O',
text=msg_text
)
self.assertTrue(mock_send.called)
msg_arg = mock_send.call_args[0][0]
self.assertEqual(msg_arg.date, sms.date)
self.assertEqual(sms.backend_api, backend.hq_api_id)
self.assertEqual(sms.backend_id, backend.couch_id)
def _verify_inbound_request(self, backend_api_id, msg_text, backend_couch_id=None):
sms = SMS.objects.get(
domain=self.domain_obj.name,
direction='I',
text=msg_text
)
self.assertEqual(sms.backend_api, backend_api_id)
if backend_couch_id:
self.assertEqual(sms.backend_id, backend_couch_id)
def _simulate_inbound_request_with_payload(self, url,
content_type, payload):
with create_test_case(
self.domain_obj.name,
'participant',
'contact',
case_properties={
'contact_phone_number': self.test_phone_number,
'contact_phone_number_is_verified': '1',
},
drop_signals=False):
response = Client().post(url, payload, content_type=content_type)
self.assertEqual(response.status_code, 200)
def _simulate_inbound_request(self, url, phone_param,
msg_param, msg_text, post=False, additional_params=None,
expected_response_code=200, is_megamobile=False):
fcn = Client().post if post else Client().get
payload = {
phone_param: self.test_phone_number,
msg_param: msg_text,
}
if additional_params:
payload.update(additional_params)
contact_phone_prefix = '63' if is_megamobile else ''
with create_test_case(
self.domain_obj.name,
'participant',
'contact',
case_properties={
'contact_phone_number': contact_phone_prefix + self.test_phone_number,
'contact_phone_number_is_verified': '1',
},
drop_signals=False):
response = fcn(url, payload)
self.assertEqual(response.status_code, expected_response_code)
@patch('corehq.messaging.smsbackends.unicel.models.SQLUnicelBackend.send')
@patch('corehq.messaging.smsbackends.mach.models.SQLMachBackend.send')
@patch('corehq.messaging.smsbackends.tropo.models.SQLTropoBackend.send')
@patch('corehq.messaging.smsbackends.http.models.SQLHttpBackend.send')
@patch('corehq.messaging.smsbackends.telerivet.models.SQLTelerivetBackend.send')
@patch('corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send')
@patch('corehq.messaging.smsbackends.grapevine.models.SQLGrapevineBackend.send')
@patch('corehq.messaging.smsbackends.twilio.models.SQLTwilioBackend.send')
@patch('corehq.messaging.smsbackends.megamobile.models.SQLMegamobileBackend.send')
@patch('corehq.messaging.smsbackends.smsgh.models.SQLSMSGHBackend.send')
@patch('corehq.messaging.smsbackends.apposit.models.SQLAppositBackend.send')
@patch('corehq.messaging.smsbackends.sislog.models.SQLSislogBackend.send')
@patch('corehq.messaging.smsbackends.yo.models.SQLYoBackend.send')
@patch('corehq.messaging.smsbackends.push.models.PushBackend.send')
def test_outbound_sms(
self,
push_send,
yo_send,
sislog_send,
apposit_send,
smsgh_send,
megamobile_send,
twilio_send,
grapevine_send,
test_send,
telerivet_send,
http_send,
tropo_send,
mach_send,
unicel_send):
self._test_outbound_backend(self.unicel_backend, 'unicel test', unicel_send)
self._test_outbound_backend(self.mach_backend, 'mach test', mach_send)
self._test_outbound_backend(self.tropo_backend, 'tropo test', tropo_send)
self._test_outbound_backend(self.http_backend, 'http test', http_send)
self._test_outbound_backend(self.telerivet_backend, 'telerivet test', telerivet_send)
self._test_outbound_backend(self.test_backend, 'test test', test_send)
self._test_outbound_backend(self.grapevine_backend, 'grapevine test', grapevine_send)
self._test_outbound_backend(self.twilio_backend, 'twilio test', twilio_send)
self._test_outbound_backend(self.megamobile_backend, 'megamobile test', megamobile_send)
self._test_outbound_backend(self.smsgh_backend, 'smsgh test', smsgh_send)
self._test_outbound_backend(self.apposit_backend, 'apposit test', apposit_send)
self._test_outbound_backend(self.sislog_backend, 'sislog test', sislog_send)
self._test_outbound_backend(self.yo_backend, 'yo test', yo_send)
self._test_outbound_backend(self.push_backend, 'push test', push_send)
@run_with_all_backends
def test_unicel_inbound_sms(self):
self._simulate_inbound_request('/unicel/in/', phone_param=InboundParams.SENDER,
msg_param=InboundParams.MESSAGE, msg_text='unicel test')
self._verify_inbound_request(self.unicel_backend.get_api_id(), 'unicel test')
@run_with_all_backends
def test_tropo_inbound_sms(self):
tropo_data = {'session': {'from': {'id': self.test_phone_number}, 'initialText': 'tropo test'}}
self._simulate_inbound_request_with_payload('/tropo/sms/',
content_type='text/json', payload=json.dumps(tropo_data))
self._verify_inbound_request(self.tropo_backend.get_api_id(), 'tropo test')
@run_with_all_backends
def test_telerivet_inbound_sms(self):
additional_params = {
'event': 'incoming_message',
'message_type': 'sms',
'secret': self.telerivet_backend.config.webhook_secret
}
self._simulate_inbound_request('/telerivet/in/', phone_param='from_number_e164',
msg_param='content', msg_text='telerivet test', post=True,
additional_params=additional_params)
self._verify_inbound_request(self.telerivet_backend.get_api_id(), 'telerivet test')
@run_with_all_backends
@override_settings(SIMPLE_API_KEYS={'grapevine-test': 'grapevine-api-key'})
def test_grapevine_inbound_sms(self):
xml = """
<gviSms>
<smsDateTime>2015-10-12T12:00:00</smsDateTime>
<cellNumber>99912345</cellNumber>
<content>grapevine test</content>
</gviSms>
"""
payload = urlencode({'XML': xml})
self._simulate_inbound_request_with_payload(
'/gvi/api/sms/?apiuser=grapevine-test&apikey=grapevine-api-key',
content_type='application/x-www-form-urlencoded', payload=payload)
self._verify_inbound_request(self.grapevine_backend.get_api_id(), 'grapevine test')
@run_with_all_backends
def test_twilio_inbound_sms(self):
url = '/twilio/sms/%s' % self.twilio_backend.inbound_api_key
self._simulate_inbound_request(url, phone_param='From',
msg_param='Body', msg_text='twilio test', post=True)
self._verify_inbound_request(self.twilio_backend.get_api_id(), 'twilio test',
backend_couch_id=self.twilio_backend.couch_id)
@run_with_all_backends
def test_twilio_401_response(self):
start_count = SMS.objects.count()
self._simulate_inbound_request('/twilio/sms/xxxxx', phone_param='From',
msg_param='Body', msg_text='twilio test', post=True,
expected_response_code=401)
end_count = SMS.objects.count()
self.assertEqual(start_count, end_count)
@run_with_all_backends
def test_megamobile_inbound_sms(self):
self._simulate_inbound_request('/megamobile/sms/', phone_param='cel',
msg_param='msg', msg_text='megamobile test', is_megamobile=True)
self._verify_inbound_request(self.megamobile_backend.get_api_id(), 'megamobile test')
@run_with_all_backends
def test_sislog_inbound_sms(self):
self._simulate_inbound_request('/sislog/in/', phone_param='sender',
msg_param='msgdata', msg_text='sislog test')
self._verify_inbound_request(self.sislog_backend.get_api_id(), 'sislog test')
@run_with_all_backends
def test_yo_inbound_sms(self):
self._simulate_inbound_request('/yo/sms/', phone_param='sender',
msg_param='message', msg_text='yo test')
self._verify_inbound_request(self.yo_backend.get_api_id(), 'yo test')
@run_with_all_backends
def test_smsgh_inbound_sms(self):
user = ApiUser.create('smsgh-api-key', 'smsgh-api-key', permissions=[PERMISSION_POST_SMS])
user.save()
self._simulate_inbound_request('/smsgh/sms/smsgh-api-key/', phone_param='snr',
msg_param='msg', msg_text='smsgh test')
self._verify_inbound_request('SMSGH', 'smsgh test')
user.delete()
@run_with_all_backends
def test_apposit_inbound_sms(self):
self._simulate_inbound_request_with_payload(
'/apposit/in/%s/' % self.apposit_backend.inbound_api_key,
'application/json',
json.dumps({
'from': self.test_phone_number,
'message': 'apposit test',
})
)
self._verify_inbound_request('APPOSIT', 'apposit test',
backend_couch_id=self.apposit_backend.couch_id)
@run_with_all_backends
def test_push_inbound_sms(self):
xml = """<?xml version="1.0" encoding="UTF-8"?>
<bspostevent>
<field name="MobileNumber" type="string">99912345</field>
<field name="Text" type="string">push test</field>
</bspostevent>
"""
self._simulate_inbound_request_with_payload(
'/push/sms/%s/' % self.push_backend.inbound_api_key,
content_type='application/xml', payload=xml)
self._verify_inbound_request(self.push_backend.get_api_id(), 'push test',
backend_couch_id=self.push_backend.couch_id)
def tearDown(self):
delete_domain_phone_numbers(self.domain_obj.name)
self.domain_obj.delete()
self.unicel_backend.delete()
self.mach_backend.delete()
self.tropo_backend.delete()
self.http_backend.delete()
self.telerivet_backend.delete()
self.test_backend.delete()
self.grapevine_backend.delete()
self.twilio_backend.delete()
self.megamobile_backend.delete()
self.smsgh_backend.delete()
self.apposit_backend.delete()
self.sislog_backend.delete()
self.yo_backend.delete()
self.push_backend.delete()
super(AllBackendTest, self).tearDown()
class OutgoingFrameworkTestCase(BaseSMSTest):
def setUp(self):
super(OutgoingFrameworkTestCase, self).setUp()
self.domain = "test-domain"
self.domain2 = "test-domain2"
self.domain_obj = Domain(name=self.domain)
self.domain_obj.save()
self.create_account_and_subscription(self.domain_obj.name)
self.domain_obj = Domain.get(self.domain_obj._id)
self.backend1 = SQLTestSMSBackend.objects.create(
name='BACKEND1',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
self.backend2 = SQLTestSMSBackend.objects.create(
name='BACKEND2',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
self.backend3 = SQLTestSMSBackend.objects.create(
name='BACKEND3',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
self.backend4 = SQLTestSMSBackend.objects.create(
name='BACKEND4',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
self.backend5 = SQLTestSMSBackend.objects.create(
name='BACKEND5',
domain=self.domain,
is_global=False,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
self.backend6 = SQLTestSMSBackend.objects.create(
name='BACKEND6',
domain=self.domain2,
is_global=False,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
self.backend6.set_shared_domains([self.domain])
self.backend7 = SQLTestSMSBackend.objects.create(
name='BACKEND7',
domain=self.domain2,
is_global=False,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
self.backend8 = SQLTestSMSBackend.objects.create(
name='BACKEND',
domain=self.domain,
is_global=False,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
self.backend9 = SQLTestSMSBackend.objects.create(
name='BACKEND',
domain=self.domain2,
is_global=False,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
self.backend9.set_shared_domains([self.domain])
self.backend10 = SQLTestSMSBackend.objects.create(
name='BACKEND',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id()
)
self.backend_mapping1 = SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='*',
backend=self.backend1
)
self.backend_mapping2 = SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='1',
backend=self.backend2
)
self.backend_mapping3 = SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='91',
backend=self.backend3
)
self.backend_mapping4 = SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='265',
backend=self.backend4
)
self.backend_mapping5 = SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='256',
backend=self.backend5
)
self.backend_mapping6 = SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='25670',
backend=self.backend6
)
self.backend_mapping7 = SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='25675',
backend=self.backend7
)
def tearDown(self):
delete_domain_phone_numbers(self.domain)
delete_domain_phone_numbers(self.domain2)
for obj in (
list(MobileBackendInvitation.objects.all()) +
list(SQLMobileBackendMapping.objects.all())
):
# For now we can't do bulk delete because we need to have the
# delete sync with couch
obj.delete()
self.backend1.delete()
self.backend2.delete()
self.backend3.delete()
self.backend4.delete()
self.backend5.delete()
self.backend6.delete()
self.backend7.delete()
self.backend8.delete()
self.backend9.delete()
self.backend10.delete()
self.domain_obj.delete()
super(OutgoingFrameworkTestCase, self).tearDown()
def test_multiple_country_prefixes(self):
self.assertEqual(
SQLMobileBackend.load_default_by_phone_and_domain(
SQLMobileBackend.SMS,
'256800000000'
).pk,
self.backend5.pk
)
self.assertEqual(
SQLMobileBackend.load_default_by_phone_and_domain(
SQLMobileBackend.SMS,
'256700000000'
).pk,
self.backend6.pk
)
self.assertEqual(
SQLMobileBackend.load_default_by_phone_and_domain(
SQLMobileBackend.SMS,
'256750000000'
).pk,
self.backend7.pk
)
def __test_global_backend_map(self):
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(send_sms(self.domain, None, '15551234567', 'Test for BACKEND2'))
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend2.pk)
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(send_sms(self.domain, None, '9100000000', 'Test for BACKEND3'))
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend3.pk)
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(send_sms(self.domain, None, '26500000000', 'Test for BACKEND4'))
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend4.pk)
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(send_sms(self.domain, None, '25800000000', 'Test for BACKEND1'))
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend1.pk)
def __test_domain_default(self):
# Test overriding with domain-level backend
SQLMobileBackendMapping.set_default_domain_backend(self.domain, self.backend5)
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(send_sms(self.domain, None, '15551234567', 'Test for BACKEND5'))
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend5.pk)
def __test_shared_backend(self):
# Test use of backend that another domain owns but has granted access
SQLMobileBackendMapping.set_default_domain_backend(self.domain, self.backend6)
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(send_sms(self.domain, None, '25800000000', 'Test for BACKEND6'))
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend6.pk)
# Test trying to use a backend that another domain owns but has not granted access
SQLMobileBackendMapping.set_default_domain_backend(self.domain, self.backend7)
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertFalse(send_sms(self.domain, None, '25800000000', 'Test Unauthorized'))
self.assertEqual(mock_send.call_count, 0)
def __test_verified_number_with_map(self, contact):
# Test sending to verified number with backend map
SQLMobileBackendMapping.unset_default_domain_backend(self.domain)
verified_number = contact.get_verified_number()
self.assertTrue(verified_number is not None)
self.assertTrue(verified_number.backend_id is None)
self.assertEqual(verified_number.phone_number, '15551234567')
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(send_sms_to_verified_number(verified_number, 'Test for BACKEND2'))
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend2.pk)
# Test sending to verified number with default domain backend
SQLMobileBackendMapping.set_default_domain_backend(self.domain, self.backend5)
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(send_sms_to_verified_number(verified_number, 'Test for BACKEND5'))
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend5.pk)
def __test_contact_level_backend(self, contact):
# Test sending to verified number with a contact-level backend owned by the domain
update_case(self.domain, contact.case_id, case_properties={'contact_backend_id': 'BACKEND'})
contact = CaseAccessors(self.domain).get_case(contact.case_id)
verified_number = contact.get_verified_number()
self.assertTrue(verified_number is not None)
self.assertEqual(verified_number.backend_id, 'BACKEND')
self.assertEqual(verified_number.phone_number, '15551234567')
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(send_sms_to_verified_number(verified_number, 'Test for domain BACKEND'))
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend8.pk)
# Test sending to verified number with a contact-level backend granted to the domain by another domain
self.backend8.name = 'BACKEND8'
self.backend8.save()
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(send_sms_to_verified_number(verified_number, 'Test for shared domain BACKEND'))
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend9.pk)
# Test sending to verified number with a contact-level global backend
self.backend9.name = 'BACKEND9'
self.backend9.save()
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(send_sms_to_verified_number(verified_number, 'Test for global BACKEND'))
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend10.pk)
# Test raising exception if contact-level backend is not found
self.backend10.name = 'BACKEND10'
self.backend10.save()
with self.assertRaises(BadSMSConfigException):
send_sms_to_verified_number(verified_number, 'Test for unknown BACKEND')
def __test_send_sms_with_backend(self):
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(
send_sms_with_backend(self.domain, '+15551234567', 'Test for BACKEND3', self.backend3.couch_id)
)
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend3.pk)
def __test_send_sms_with_backend_name(self):
with patch(
'corehq.messaging.smsbackends.test.models.SQLTestSMSBackend.send',
autospec=True
) as mock_send:
self.assertTrue(
send_sms_with_backend_name(self.domain, '+15551234567', 'Test for BACKEND3', 'BACKEND3')
)
self.assertEqual(mock_send.call_count, 1)
self.assertEqual(mock_send.call_args[0][0].pk, self.backend3.pk)
def test_choosing_appropriate_backend_for_outgoing(self):
with create_test_case(
self.domain,
'participant',
'contact',
case_properties={
'contact_phone_number': '15551234567',
'contact_phone_number_is_verified': '1',
},
drop_signals=False) as contact:
self.__test_global_backend_map()
self.__test_domain_default()
self.__test_shared_backend()
self.__test_verified_number_with_map(contact)
self.__test_contact_level_backend(contact)
self.__test_send_sms_with_backend()
self.__test_send_sms_with_backend_name()
class SQLMobileBackendTestCase(TestCase):
def assertBackendsEqual(self, backend1, backend2):
self.assertEqual(backend1.pk, backend2.pk)
self.assertEqual(backend1.__class__, backend2.__class__)
def test_domain_is_shared(self):
backend = SQLTestSMSBackend.objects.create(
name='BACKEND',
domain='shared-test-1',
is_global=False,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
self.assertFalse(backend.domain_is_shared('shared-test-2'))
backend.set_shared_domains(['shared-test-2'])
self.assertTrue(backend.domain_is_shared('shared-test-2'))
backend.soft_delete()
self.assertFalse(backend.domain_is_shared('shared-test-2'))
backend.delete()
def test_domain_is_authorized(self):
backend1 = SQLTestSMSBackend.objects.create(
name='BACKEND1',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend2 = SQLTestSMSBackend.objects.create(
name='BACKEND2',
domain='auth-test-1',
is_global=False,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
self.assertTrue(backend1.domain_is_authorized('auth-test-1'))
self.assertTrue(backend1.domain_is_authorized('auth-test-2'))
self.assertTrue(backend1.domain_is_authorized('auth-test-3'))
self.assertTrue(backend2.domain_is_authorized('auth-test-1'))
self.assertFalse(backend2.domain_is_authorized('auth-test-2'))
self.assertFalse(backend2.domain_is_authorized('auth-test-3'))
backend2.set_shared_domains(['auth-test-2'])
self.assertTrue(backend2.domain_is_authorized('auth-test-1'))
self.assertTrue(backend2.domain_is_authorized('auth-test-2'))
self.assertFalse(backend2.domain_is_authorized('auth-test-3'))
backend1.delete()
backend2.delete()
def test_load_default_by_phone_and_domain(self):
backend1 = SQLTestSMSBackend.objects.create(
name='BACKEND1',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend2 = SQLTestSMSBackend.objects.create(
name='BACKEND2',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend3 = SQLTestSMSBackend.objects.create(
name='BACKEND3',
is_global=False,
domain='load-default-test',
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend4 = SQLTestSMSBackend.objects.create(
name='BACKEND4',
is_global=False,
domain='load-default-test',
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='*',
backend=backend1
)
SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='27',
backend=backend2
)
SQLMobileBackendMapping.objects.create(
is_global=False,
domain='load-default-test',
backend_type=SQLMobileBackend.SMS,
prefix='*',
backend=backend3
)
SQLMobileBackendMapping.objects.create(
is_global=False,
domain='load-default-test',
backend_type=SQLMobileBackend.SMS,
prefix='27',
backend=backend4
)
        # Test global prefix map (a domain with no mappings of its own falls back to it)
self.assertBackendsEqual(
SQLMobileBackend.load_default_by_phone_and_domain(
SQLMobileBackend.SMS,
'2700000000',
domain='load-default-test-2'
),
backend2
)
# Test domain-level prefix map
self.assertBackendsEqual(
SQLMobileBackend.load_default_by_phone_and_domain(
SQLMobileBackend.SMS,
'2700000000',
domain='load-default-test'
),
backend4
)
# Test domain catch-all
backend4.soft_delete()
self.assertBackendsEqual(
SQLMobileBackend.load_default_by_phone_and_domain(
SQLMobileBackend.SMS,
'2700000000',
domain='load-default-test'
),
backend3
)
        # Test global prefix map (used once the domain's own mappings are gone)
backend3.soft_delete()
self.assertBackendsEqual(
SQLMobileBackend.load_default_by_phone_and_domain(
SQLMobileBackend.SMS,
'2700000000',
domain='load-default-test'
),
backend2
)
# Test global catch-all
backend2.soft_delete()
self.assertBackendsEqual(
SQLMobileBackend.load_default_by_phone_and_domain(
SQLMobileBackend.SMS,
'2700000000',
domain='load-default-test'
),
backend1
)
# Test raising exception if nothing found
backend1.soft_delete()
with self.assertRaises(BadSMSConfigException):
SQLMobileBackend.load_default_by_phone_and_domain(
SQLMobileBackend.SMS,
'2700000000',
domain='load-default-test'
)
backend1.delete()
backend2.delete()
backend3.delete()
backend4.delete()
def test_get_backend_api_id(self):
backend = SQLTestSMSBackend.objects.create(
name='BACKEND',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
        self.assertEqual(
SQLMobileBackend.get_backend_api_id(backend.pk),
SQLTestSMSBackend.get_api_id()
)
        self.assertEqual(
SQLMobileBackend.get_backend_api_id(backend.couch_id, is_couch_id=True),
SQLTestSMSBackend.get_api_id()
)
backend.soft_delete()
with self.assertRaises(SQLMobileBackend.DoesNotExist):
SQLMobileBackend.get_backend_api_id(backend.pk)
with self.assertRaises(SQLMobileBackend.DoesNotExist):
SQLMobileBackend.get_backend_api_id(backend.couch_id, is_couch_id=True)
backend.delete()
def test_load(self):
backend = SQLTestSMSBackend.objects.create(
name='BACKEND',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
self.assertBackendsEqual(
SQLMobileBackend.load(backend.pk),
backend
)
self.assertBackendsEqual(
SQLMobileBackend.load(backend.pk, api_id=SQLTestSMSBackend.get_api_id()),
backend
)
self.assertBackendsEqual(
SQLMobileBackend.load(backend.couch_id, is_couch_id=True),
backend
)
self.assertBackendsEqual(
SQLMobileBackend.load(
backend.couch_id,
api_id=SQLTestSMSBackend.get_api_id(),
is_couch_id=True
),
backend
)
backend.soft_delete()
with self.assertRaises(SQLMobileBackend.DoesNotExist):
SQLMobileBackend.load(backend.pk, api_id=SQLTestSMSBackend.get_api_id())
with self.assertRaises(SQLMobileBackend.DoesNotExist):
SQLMobileBackend.load(
backend.couch_id,
api_id=SQLTestSMSBackend.get_api_id(),
is_couch_id=True
)
with self.assertRaises(BadSMSConfigException):
SQLMobileBackend.load(backend.pk, api_id='this-api-id-does-not-exist')
backend.delete()
def test_load_by_name(self):
backend1 = SQLTestSMSBackend.objects.create(
name='BACKEND_BY_NAME_TEST',
is_global=False,
domain='backend-by-name-test-1',
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend2 = SQLTestSMSBackend.objects.create(
name='BACKEND_BY_NAME_TEST',
is_global=False,
domain='backend-by-name-test-2',
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend2.set_shared_domains(['backend-by-name-test-1'])
backend3 = SQLTestSMSBackend.objects.create(
name='BACKEND_BY_NAME_TEST',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
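        # Resolution order: a backend owned by the domain wins, then a backend shared
        # with the domain, then a global backend with the same name.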
self.assertBackendsEqual(
SQLMobileBackend.load_by_name(
SQLMobileBackend.SMS,
'backend-by-name-test-1',
'BACKEND_BY_NAME_TEST'
),
backend1
)
self.assertBackendsEqual(
SQLMobileBackend.load_by_name(
SQLMobileBackend.SMS,
'backend-by-name-test-3',
'BACKEND_BY_NAME_TEST'
),
backend3
)
backend1.soft_delete()
self.assertBackendsEqual(
SQLMobileBackend.load_by_name(
SQLMobileBackend.SMS,
'backend-by-name-test-1',
'BACKEND_BY_NAME_TEST'
),
backend2
)
backend2.set_shared_domains([])
self.assertBackendsEqual(
SQLMobileBackend.load_by_name(
SQLMobileBackend.SMS,
'backend-by-name-test-1',
'BACKEND_BY_NAME_TEST'
),
backend3
)
self.assertBackendsEqual(
SQLMobileBackend.load_by_name(
SQLMobileBackend.SMS,
'backend-by-name-test-2',
'BACKEND_BY_NAME_TEST'
),
backend2
)
backend2.soft_delete()
self.assertBackendsEqual(
SQLMobileBackend.load_by_name(
SQLMobileBackend.SMS,
'backend-by-name-test-2',
'BACKEND_BY_NAME_TEST'
),
backend3
)
backend3.soft_delete()
with self.assertRaises(BadSMSConfigException):
SQLMobileBackend.load_by_name(
SQLMobileBackend.SMS,
'backend-by-name-test-1',
'BACKEND_BY_NAME_TEST'
)
backend1.delete()
backend2.delete()
backend3.delete()
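# Proxy backend models used to exercise load balancing and rate limiting. They are
# exposed to the SMS framework via mock_get_backend_classes below.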
class LoadBalanceBackend(SQLTestSMSBackend, PhoneLoadBalancingMixin):
class Meta:
proxy = True
@classmethod
def get_api_id(cls):
return 'LOAD_BALANCE'
class RateLimitBackend(SQLTestSMSBackend):
class Meta:
proxy = True
def get_sms_rate_limit(self):
return 10
@classmethod
def get_api_id(cls):
return 'RATE_LIMIT'
class LoadBalanceAndRateLimitBackend(SQLTestSMSBackend, PhoneLoadBalancingMixin):
class Meta:
proxy = True
def get_sms_rate_limit(self):
return 10
@classmethod
def get_api_id(cls):
return 'LOAD_BALANCE_RATE_LIMIT'
def mock_get_backend_classes():
return {
LoadBalanceBackend.get_api_id(): LoadBalanceBackend,
RateLimitBackend.get_api_id(): RateLimitBackend,
LoadBalanceAndRateLimitBackend.get_api_id(): LoadBalanceAndRateLimitBackend,
}
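# Make the SMS framework resolve the proxy backend classes above by their hq_api_id.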
@patch('corehq.apps.sms.util.get_backend_classes', new=mock_get_backend_classes)
class LoadBalancingAndRateLimitingTestCase(BaseSMSTest):
def setUp(self):
super(LoadBalancingAndRateLimitingTestCase, self).setUp()
self.domain = 'load-balance-rate-limit'
self.domain_obj = Domain(name=self.domain)
self.domain_obj.save()
self.create_account_and_subscription(self.domain)
self.domain_obj = Domain.get(self.domain_obj.get_id)
def tearDown(self):
QueuedSMS.objects.all().delete()
self.domain_obj.delete()
super(LoadBalancingAndRateLimitingTestCase, self).tearDown()
def create_outgoing_sms(self, backend):
sms = QueuedSMS(
domain=self.domain,
date=datetime.utcnow(),
direction='O',
phone_number='9991234567',
text='message',
backend_id=backend.couch_id
)
sms.save()
return sms
def delete_load_balancing_keys(self, backend):
# This should only be necessary when running tests locally, but doesn't
# hurt to run all the time.
client = get_redis_client().client.get_client()
client.delete(backend.get_load_balance_redis_key())
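    # handle_outgoing returns a flag indicating whether the message should be requeued.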
def assertRequeue(self, backend):
requeue_flag = handle_outgoing(self.create_outgoing_sms(backend))
self.assertTrue(requeue_flag)
def assertNotRequeue(self, backend):
requeue_flag = handle_outgoing(self.create_outgoing_sms(backend))
self.assertFalse(requeue_flag)
def test_load_balance(self):
backend = LoadBalanceBackend.objects.create(
name='BACKEND',
is_global=True,
load_balancing_numbers=['9990001', '9990002', '9990003'],
hq_api_id=LoadBalanceBackend.get_api_id()
)
self.delete_load_balancing_keys(backend)
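        # Repeated sends should cycle through the load balancing numbers in round-robin order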
        for _ in range(5):
with patch('corehq.apps.sms.tests.test_backends.LoadBalanceBackend.send') as mock_send:
self.assertNotRequeue(backend)
self.assertTrue(mock_send.called)
self.assertEqual(mock_send.call_args[1]['orig_phone_number'], '9990001')
self.assertNotRequeue(backend)
self.assertTrue(mock_send.called)
self.assertEqual(mock_send.call_args[1]['orig_phone_number'], '9990002')
self.assertNotRequeue(backend)
self.assertTrue(mock_send.called)
self.assertEqual(mock_send.call_args[1]['orig_phone_number'], '9990003')
backend.delete()
def test_rate_limit(self):
backend = RateLimitBackend.objects.create(
name='BACKEND',
is_global=True,
hq_api_id=RateLimitBackend.get_api_id()
)
# Requeue flag should be False until we hit the limit
        for _ in range(backend.get_sms_rate_limit()):
with patch('corehq.apps.sms.tests.test_backends.RateLimitBackend.send') as mock_send:
self.assertNotRequeue(backend)
self.assertTrue(mock_send.called)
# Requeue flag should be True after hitting the limit
with patch('corehq.apps.sms.tests.test_backends.RateLimitBackend.send') as mock_send:
self.assertRequeue(backend)
self.assertFalse(mock_send.called)
backend.delete()
def test_load_balance_and_rate_limit(self):
backend = LoadBalanceAndRateLimitBackend.objects.create(
name='BACKEND',
is_global=True,
load_balancing_numbers=['9990001', '9990002', '9990003'],
hq_api_id=LoadBalanceAndRateLimitBackend.get_api_id()
)
self.delete_load_balancing_keys(backend)
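        # While under the rate limit, sends should still round-robin across the load balancing numbers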
        for _ in range(backend.get_sms_rate_limit()):
with patch('corehq.apps.sms.tests.test_backends.LoadBalanceAndRateLimitBackend.send') as mock_send:
self.assertNotRequeue(backend)
self.assertTrue(mock_send.called)
self.assertEqual(mock_send.call_args[1]['orig_phone_number'], '9990001')
self.assertNotRequeue(backend)
self.assertTrue(mock_send.called)
self.assertEqual(mock_send.call_args[1]['orig_phone_number'], '9990002')
self.assertNotRequeue(backend)
self.assertTrue(mock_send.called)
self.assertEqual(mock_send.call_args[1]['orig_phone_number'], '9990003')
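        # Once the rate limit is reached, every send should be requeued without calling send()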
with patch('corehq.apps.sms.tests.test_backends.LoadBalanceAndRateLimitBackend.send') as mock_send:
self.assertRequeue(backend)
self.assertFalse(mock_send.called)
self.assertRequeue(backend)
self.assertFalse(mock_send.called)
self.assertRequeue(backend)
self.assertFalse(mock_send.called)
backend.delete()
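# Tests prefix-based backend resolution (BackendMap) and the SQLMobileBackendMapping
# helpers for default domain backends and prefix-to-backend maps.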
class SQLMobileBackendMappingTestCase(TestCase):
def test_backend_map(self):
backend_map = BackendMap(
1, {
'1': 2,
'27': 3,
'256': 4,
'25670': 5,
'25675': 6,
}
)
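        # The longest matching prefix wins; numbers matching no prefix fall back to the
        # catch-all backend.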
self.assertEqual(backend_map.get_backend_id_by_prefix('910000000'), 1)
self.assertEqual(backend_map.get_backend_id_by_prefix('100000000'), 2)
self.assertEqual(backend_map.get_backend_id_by_prefix('200000000'), 1)
self.assertEqual(backend_map.get_backend_id_by_prefix('250000000'), 1)
self.assertEqual(backend_map.get_backend_id_by_prefix('270000000'), 3)
self.assertEqual(backend_map.get_backend_id_by_prefix('256000000'), 4)
self.assertEqual(backend_map.get_backend_id_by_prefix('256700000'), 5)
self.assertEqual(backend_map.get_backend_id_by_prefix('256750000'), 6)
def assertNoDomainDefaultBackend(self, domain):
self.assertEqual(
SQLMobileBackendMapping.objects.filter(domain=domain).count(),
0
)
def assertDomainDefaultBackend(self, domain, backend):
mapping = SQLMobileBackendMapping.objects.get(domain=domain)
self.assertFalse(mapping.is_global)
self.assertEqual(mapping.domain, domain)
self.assertEqual(mapping.backend_type, SQLMobileBackend.SMS)
self.assertEqual(mapping.prefix, '*')
self.assertEqual(mapping.backend_id, backend.pk)
def test_set_default_domain_backend(self):
backend1 = SQLTestSMSBackend.objects.create(
name='BACKEND1',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend2 = SQLTestSMSBackend.objects.create(
name='BACKEND2',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
domain = 'domain-default-backend-test'
self.assertNoDomainDefaultBackend(domain)
SQLMobileBackendMapping.set_default_domain_backend(domain, backend1)
self.assertDomainDefaultBackend(domain, backend1)
SQLMobileBackendMapping.set_default_domain_backend(domain, backend2)
self.assertDomainDefaultBackend(domain, backend2)
SQLMobileBackendMapping.unset_default_domain_backend(domain)
self.assertNoDomainDefaultBackend(domain)
backend1.delete()
backend2.delete()
def test_get_prefix_to_backend_map(self):
backend1 = SQLTestSMSBackend.objects.create(
name='BACKEND1',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend2 = SQLTestSMSBackend.objects.create(
name='BACKEND2',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend3 = SQLTestSMSBackend.objects.create(
name='BACKEND3',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend4 = SQLTestSMSBackend.objects.create(
name='BACKEND4',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend5 = SQLTestSMSBackend.objects.create(
name='BACKEND5',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend6 = SQLTestSMSBackend.objects.create(
name='BACKEND6',
is_global=True,
hq_api_id=SQLTestSMSBackend.get_api_id(),
)
backend_mapping1 = SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='*',
backend=backend1
)
backend_mapping2 = SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='27',
backend=backend2
)
backend_mapping3 = SQLMobileBackendMapping.objects.create(
is_global=True,
backend_type=SQLMobileBackend.SMS,
prefix='1',
backend=backend3
)
backend_mapping4 = SQLMobileBackendMapping.objects.create(
is_global=False,
domain='prefix-backend-map-test',
backend_type=SQLMobileBackend.SMS,
prefix='*',
backend=backend4
)
backend_mapping5 = SQLMobileBackendMapping.objects.create(
is_global=False,
domain='prefix-backend-map-test',
backend_type=SQLMobileBackend.SMS,
prefix='256',
backend=backend5
)
backend_mapping6 = SQLMobileBackendMapping.objects.create(
is_global=False,
domain='prefix-backend-map-test',
backend_type=SQLMobileBackend.SMS,
prefix='25670',
backend=backend6
)
global_backend_map = SQLMobileBackendMapping.get_prefix_to_backend_map(SQLMobileBackend.SMS)
self.assertEqual(global_backend_map.catchall_backend_id, backend1.pk)
self.assertEqual(global_backend_map.backend_map_dict, {
'27': backend2.pk,
'1': backend3.pk,
})
domain_backend_map = SQLMobileBackendMapping.get_prefix_to_backend_map(
SQLMobileBackend.SMS,
domain='prefix-backend-map-test'
)
self.assertEqual(domain_backend_map.catchall_backend_id, backend4.pk)
self.assertEqual(domain_backend_map.backend_map_dict, {
'256': backend5.pk,
'25670': backend6.pk,
})
backend_mapping1.delete()
backend_mapping2.delete()
backend_mapping3.delete()
backend_mapping4.delete()
backend_mapping5.delete()
backend_mapping6.delete()
backend1.delete()
backend2.delete()
backend3.delete()
backend4.delete()
backend5.delete()
backend6.delete()
| qedsoftware/commcare-hq | corehq/apps/sms/tests/test_backends.py | Python | bsd-3-clause | 52,667 |